Add StellaOps.Facet library (drift detection, built-in facets, classifier, crypto hash) with unit tests; broaden GostCryptography NoWarn list; migrate PKCS12 loading to X509CertificateLoader

This commit is contained in:
StellaOps Bot
2026-01-06 09:42:02 +02:00
parent 94d68bee8b
commit 37e11918e0
443 changed files with 85863 additions and 897 deletions

View File

@@ -18,7 +18,7 @@
</PropertyGroup>
<PropertyGroup>
<NoWarn>1701;1702;1591;CA1416;SYSLIB0004</NoWarn>
<NoWarn>1701;1702;1591;CA1416;SYSLIB0004;CS8600;CS8601;CS8602;CS8603;CS8604;CS8605;CS8618;CS8625;CS8765;CS8767;CS0472;CS0419</NoWarn>
<PackageId>GostCryptography</PackageId>
<Title>GostCryptography</Title>
<Version>$(GostCryptographyVersion)</Version>

View File

@@ -131,7 +131,7 @@ public class LocalEidasProvider
{
if (options.Type.Equals("PKCS12", StringComparison.OrdinalIgnoreCase))
{
var cert = new X509Certificate2(
var cert = X509CertificateLoader.LoadPkcs12FromFile(
options.Path,
options.Password,
X509KeyStorageFlags.Exportable);

View File

@@ -0,0 +1,627 @@
// <copyright file="FacetDriftDetectorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using Xunit;
namespace StellaOps.Facet.Tests;
/// <summary>
/// Tests for <see cref="FacetDriftDetector"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class FacetDriftDetectorTests
{
private readonly FakeTimeProvider _timeProvider;
private readonly FacetDriftDetector _detector;
public FacetDriftDetectorTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 6, 12, 0, 0, TimeSpan.Zero));
_detector = new FacetDriftDetector(_timeProvider);
}
#region Helper Methods
private static FacetSeal CreateBaseline(
params FacetEntry[] facets)
{
return new FacetSeal
{
ImageDigest = "sha256:baseline123",
CreatedAt = DateTimeOffset.UtcNow.AddDays(-1),
Facets = [.. facets],
CombinedMerkleRoot = "sha256:combined123"
};
}
private static FacetSeal CreateBaselineWithQuotas(
ImmutableDictionary<string, FacetQuota> quotas,
params FacetEntry[] facets)
{
return new FacetSeal
{
ImageDigest = "sha256:baseline123",
CreatedAt = DateTimeOffset.UtcNow.AddDays(-1),
Facets = [.. facets],
Quotas = quotas,
CombinedMerkleRoot = "sha256:combined123"
};
}
private static FacetSeal CreateCurrent(
params FacetEntry[] facets)
{
return new FacetSeal
{
ImageDigest = "sha256:current456",
CreatedAt = DateTimeOffset.UtcNow,
Facets = [.. facets],
CombinedMerkleRoot = "sha256:combined456"
};
}
private static FacetEntry CreateFacetEntry(
string facetId,
string merkleRoot,
int fileCount,
ImmutableArray<FacetFileEntry>? files = null)
{
return new FacetEntry
{
FacetId = facetId,
Name = facetId,
Category = FacetCategory.OsPackages,
Selectors = ["/var/lib/dpkg/**"],
MerkleRoot = merkleRoot,
FileCount = fileCount,
TotalBytes = fileCount * 1024,
Files = files
};
}
private static FacetFileEntry CreateFile(string path, string digest, long size = 1024)
{
return new FacetFileEntry(path, digest, size, DateTimeOffset.UtcNow);
}
#endregion
#region No Drift Tests
[Fact]
public async Task DetectDriftAsync_IdenticalSeals_ReturnsNoDrift()
{
// Arrange
var files = ImmutableArray.Create(
CreateFile("/etc/file1.conf", "sha256:aaa"),
CreateFile("/etc/file2.conf", "sha256:bbb"));
var facet = CreateFacetEntry("os-packages-dpkg", "sha256:root123", 2, files);
var baseline = CreateBaseline(facet);
var current = CreateCurrent(facet);
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.Should().NotBeNull();
report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
report.TotalChangedFiles.Should().Be(0);
report.FacetDrifts.Should().HaveCount(1);
report.FacetDrifts[0].HasDrift.Should().BeFalse();
}
[Fact]
public async Task DetectDriftAsync_SameMerkleRoot_ReturnsNoDrift()
{
// Arrange - same root but files not provided = fast path
var baseline = CreateBaseline(
CreateFacetEntry("os-packages-dpkg", "sha256:sameroot", 10));
var current = CreateCurrent(
CreateFacetEntry("os-packages-dpkg", "sha256:sameroot", 10));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
report.FacetDrifts[0].DriftScore.Should().Be(0);
}
#endregion
#region File Addition Tests
[Fact]
public async Task DetectDriftAsync_FilesAdded_ReportsAdditions()
{
// Arrange
var baselineFiles = ImmutableArray.Create(
CreateFile("/usr/bin/app1", "sha256:aaa"));
var currentFiles = ImmutableArray.Create(
CreateFile("/usr/bin/app1", "sha256:aaa"),
CreateFile("/usr/bin/app2", "sha256:bbb"));
var baseline = CreateBaseline(
CreateFacetEntry("binaries-usr", "sha256:root1", 1, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("binaries-usr", "sha256:root2", 2, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.FacetDrifts.Should().HaveCount(1);
var drift = report.FacetDrifts[0];
drift.Added.Should().HaveCount(1);
drift.Added[0].Path.Should().Be("/usr/bin/app2");
drift.Removed.Should().BeEmpty();
drift.Modified.Should().BeEmpty();
drift.HasDrift.Should().BeTrue();
}
#endregion
#region File Removal Tests
[Fact]
public async Task DetectDriftAsync_FilesRemoved_ReportsRemovals()
{
// Arrange
var baselineFiles = ImmutableArray.Create(
CreateFile("/usr/bin/app1", "sha256:aaa"),
CreateFile("/usr/bin/app2", "sha256:bbb"));
var currentFiles = ImmutableArray.Create(
CreateFile("/usr/bin/app1", "sha256:aaa"));
var baseline = CreateBaseline(
CreateFacetEntry("binaries-usr", "sha256:root1", 2, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("binaries-usr", "sha256:root2", 1, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.Removed.Should().HaveCount(1);
drift.Removed[0].Path.Should().Be("/usr/bin/app2");
drift.Added.Should().BeEmpty();
drift.Modified.Should().BeEmpty();
}
#endregion
#region File Modification Tests
[Fact]
public async Task DetectDriftAsync_FilesModified_ReportsModifications()
{
// Arrange
var baselineFiles = ImmutableArray.Create(
CreateFile("/etc/config.yaml", "sha256:oldhash", 512));
var currentFiles = ImmutableArray.Create(
CreateFile("/etc/config.yaml", "sha256:newhash", 1024));
var baseline = CreateBaseline(
CreateFacetEntry("config-files", "sha256:root1", 1, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("config-files", "sha256:root2", 1, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.Modified.Should().HaveCount(1);
drift.Modified[0].Path.Should().Be("/etc/config.yaml");
drift.Modified[0].PreviousDigest.Should().Be("sha256:oldhash");
drift.Modified[0].CurrentDigest.Should().Be("sha256:newhash");
drift.Modified[0].PreviousSizeBytes.Should().Be(512);
drift.Modified[0].CurrentSizeBytes.Should().Be(1024);
drift.Added.Should().BeEmpty();
drift.Removed.Should().BeEmpty();
}
#endregion
#region Mixed Changes Tests
[Fact]
public async Task DetectDriftAsync_MixedChanges_ReportsAllTypes()
{
// Arrange
var baselineFiles = ImmutableArray.Create(
CreateFile("/usr/bin/keep", "sha256:keep"),
CreateFile("/usr/bin/modify", "sha256:old"),
CreateFile("/usr/bin/remove", "sha256:gone"));
var currentFiles = ImmutableArray.Create(
CreateFile("/usr/bin/keep", "sha256:keep"),
CreateFile("/usr/bin/modify", "sha256:new"),
CreateFile("/usr/bin/add", "sha256:added"));
var baseline = CreateBaseline(
CreateFacetEntry("binaries", "sha256:root1", 3, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("binaries", "sha256:root2", 3, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.Added.Should().HaveCount(1);
drift.Removed.Should().HaveCount(1);
drift.Modified.Should().HaveCount(1);
drift.TotalChanges.Should().Be(3);
}
#endregion
#region Quota Enforcement Tests
[Fact]
public async Task DetectDriftAsync_WithinQuota_ReturnsOk()
{
// Arrange - 1 change out of 10 = 10% churn, quota is 15%
var baselineFiles = Enumerable.Range(1, 10)
.Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
.ToImmutableArray();
var currentFiles = baselineFiles
.Take(9)
.Append(CreateFile("/file10", "sha256:changed"))
.ToImmutableArray();
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("test-facet", new FacetQuota { MaxChurnPercent = 15, MaxChangedFiles = 5 });
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("test-facet", "sha256:root1", 10, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("test-facet", "sha256:root2", 10, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
}
[Fact]
public async Task DetectDriftAsync_ExceedsChurnPercent_ReturnsWarning()
{
// Arrange - 3 changes out of 10 = 30% churn, quota is 10%
var baselineFiles = Enumerable.Range(1, 10)
.Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
.ToImmutableArray();
var currentFiles = baselineFiles
.Take(7)
.Concat(Enumerable.Range(11, 3).Select(i => CreateFile($"/file{i}", $"sha256:new{i}")))
.ToImmutableArray();
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("test-facet", new FacetQuota
{
MaxChurnPercent = 10,
MaxChangedFiles = 100,
Action = QuotaExceededAction.Warn
});
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("test-facet", "sha256:root1", 10, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("test-facet", "sha256:root2", 10, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Warning);
}
[Fact]
public async Task DetectDriftAsync_ExceedsMaxFiles_WithBlockAction_ReturnsBlocked()
{
// Arrange - 6 changes, quota is max 5 files with block action
var baselineFiles = Enumerable.Range(1, 100)
.Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
.ToImmutableArray();
var currentFiles = baselineFiles
.Take(94)
.Concat(Enumerable.Range(101, 6).Select(i => CreateFile($"/file{i}", $"sha256:new{i}")))
.ToImmutableArray();
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("binaries", new FacetQuota
{
MaxChurnPercent = 100,
MaxChangedFiles = 5,
Action = QuotaExceededAction.Block
});
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("binaries", "sha256:root1", 100, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("binaries", "sha256:root2", 100, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Blocked);
report.FacetDrifts[0].QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
}
[Fact]
public async Task DetectDriftAsync_ExceedsQuota_WithRequireVex_ReturnsRequiresVex()
{
// Arrange
var baselineFiles = ImmutableArray.Create(
CreateFile("/deps/package.json", "sha256:old"));
var currentFiles = ImmutableArray.Create(
CreateFile("/deps/package.json", "sha256:new"),
CreateFile("/deps/package-lock.json", "sha256:lock"));
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("lang-deps", new FacetQuota
{
MaxChurnPercent = 50,
MaxChangedFiles = 1,
Action = QuotaExceededAction.RequireVex
});
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("lang-deps", "sha256:root1", 1, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("lang-deps", "sha256:root2", 2, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.RequiresVex);
}
#endregion
#region Allowlist Tests
[Fact]
public async Task DetectDriftAsync_AllowlistedFiles_AreExcludedFromDrift()
{
// Arrange - changes to allowlisted paths should be ignored
var baselineFiles = ImmutableArray.Create(
CreateFile("/var/lib/dpkg/status", "sha256:old"),
CreateFile("/usr/bin/app", "sha256:app"));
var currentFiles = ImmutableArray.Create(
CreateFile("/var/lib/dpkg/status", "sha256:new"), // Allowlisted
CreateFile("/usr/bin/app", "sha256:app"));
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("os-packages", new FacetQuota
{
MaxChurnPercent = 0,
MaxChangedFiles = 0,
Action = QuotaExceededAction.Block,
AllowlistGlobs = ["/var/lib/dpkg/**"]
});
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("os-packages", "sha256:root1", 2, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("os-packages", "sha256:root2", 2, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
report.FacetDrifts[0].Modified.Should().BeEmpty();
}
#endregion
#region Multi-Facet Tests
[Fact]
public async Task DetectDriftAsync_MultipleFacets_ReturnsWorstVerdict()
{
// Arrange - one facet OK, one blocked
var okFiles = ImmutableArray.Create(CreateFile("/ok/file", "sha256:same"));
var blockFiles = ImmutableArray.Create(
CreateFile("/block/file1", "sha256:old1"),
CreateFile("/block/file2", "sha256:old2"));
var blockCurrentFiles = ImmutableArray.Create(
CreateFile("/block/file1", "sha256:new1"),
CreateFile("/block/file2", "sha256:new2"));
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("ok-facet", FacetQuota.Default)
.Add("block-facet", new FacetQuota
{
MaxChurnPercent = 0,
Action = QuotaExceededAction.Block
});
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("ok-facet", "sha256:ok1", 1, okFiles),
CreateFacetEntry("block-facet", "sha256:block1", 2, blockFiles));
var current = CreateCurrent(
CreateFacetEntry("ok-facet", "sha256:ok1", 1, okFiles),
CreateFacetEntry("block-facet", "sha256:block2", 2, blockCurrentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.OverallVerdict.Should().Be(QuotaVerdict.Blocked);
report.FacetDrifts.Should().HaveCount(2);
report.FacetDrifts.First(d => d.FacetId == "ok-facet").QuotaVerdict.Should().Be(QuotaVerdict.Ok);
report.FacetDrifts.First(d => d.FacetId == "block-facet").QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
}
[Fact]
public async Task DetectDriftAsync_NewFacetAppears_ReportsAsWarning()
{
// Arrange
var baselineFiles = ImmutableArray.Create(CreateFile("/old/file", "sha256:old"));
var newFacetFiles = ImmutableArray.Create(CreateFile("/new/file", "sha256:new"));
var baseline = CreateBaseline(
CreateFacetEntry("existing-facet", "sha256:root1", 1, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("existing-facet", "sha256:root1", 1, baselineFiles),
CreateFacetEntry("new-facet", "sha256:root2", 1, newFacetFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.FacetDrifts.Should().HaveCount(2);
var newDrift = report.FacetDrifts.First(d => d.FacetId == "new-facet");
newDrift.QuotaVerdict.Should().Be(QuotaVerdict.Warning);
newDrift.Added.Should().HaveCount(1);
newDrift.BaselineFileCount.Should().Be(0);
}
[Fact]
public async Task DetectDriftAsync_FacetRemoved_ReportsAsWarningOrBlock()
{
// Arrange
var removedFacetFiles = ImmutableArray.Create(
CreateFile("/removed/file1", "sha256:gone1"),
CreateFile("/removed/file2", "sha256:gone2"));
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
.Add("removed-facet", new FacetQuota { Action = QuotaExceededAction.Block });
var baseline = CreateBaselineWithQuotas(quotas,
CreateFacetEntry("removed-facet", "sha256:root1", 2, removedFacetFiles));
var current = CreateCurrent(); // No facets
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
report.FacetDrifts.Should().HaveCount(1);
var drift = report.FacetDrifts[0];
drift.FacetId.Should().Be("removed-facet");
drift.Removed.Should().HaveCount(2);
drift.Added.Should().BeEmpty();
drift.QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
}
#endregion
#region Drift Score Tests
[Fact]
public async Task DetectDriftAsync_CalculatesDriftScore_BasedOnChanges()
{
// Arrange - 2 additions, 1 removal, 1 modification out of 10 files
// Weighted: 2 + 1 + 0.5 = 3.5 / 10 * 100 = 35%
var baselineFiles = Enumerable.Range(1, 10)
.Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
.ToImmutableArray();
var currentFiles = baselineFiles
.Skip(1) // Remove file1
.Take(8)
.Append(CreateFile("/file10", "sha256:modified")) // Modify file10
.Append(CreateFile("/file11", "sha256:new1")) // Add 2 files
.Append(CreateFile("/file12", "sha256:new2"))
.ToImmutableArray();
var baseline = CreateBaseline(
CreateFacetEntry("test", "sha256:root1", 10, baselineFiles));
var current = CreateCurrent(
CreateFacetEntry("test", "sha256:root2", 11, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.DriftScore.Should().BeGreaterThan(0);
drift.DriftScore.Should().BeLessThanOrEqualTo(100);
drift.ChurnPercent.Should().BeGreaterThan(0);
}
#endregion
#region Edge Cases
[Fact]
public async Task DetectDriftAsync_EmptyBaseline_AllFilesAreAdditions()
{
// Arrange
var currentFiles = ImmutableArray.Create(
CreateFile("/new/file1", "sha256:new1"),
CreateFile("/new/file2", "sha256:new2"));
var baseline = CreateBaseline(
CreateFacetEntry("empty-facet", "sha256:empty", 0, []));
var current = CreateCurrent(
CreateFacetEntry("empty-facet", "sha256:root", 2, currentFiles));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.Added.Should().HaveCount(2);
drift.ChurnPercent.Should().Be(100m); // All new = 100% churn
}
[Fact]
public async Task DetectDriftAsync_NullFilesInBaseline_FallsBackToRootComparison()
{
// Arrange - no file details, different roots
var baseline = CreateBaseline(
CreateFacetEntry("no-files", "sha256:root1", 10, null));
var current = CreateCurrent(
CreateFacetEntry("no-files", "sha256:root2", 10, null));
// Act
var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);
// Assert
var drift = report.FacetDrifts[0];
drift.DriftScore.Should().Be(100m); // Max drift when can't compute details
}
[Fact]
public async Task DetectDriftAsync_Cancellation_ThrowsOperationCanceled()
{
// Arrange
var baseline = CreateBaseline(
CreateFacetEntry("test", "sha256:root1", 10));
var current = CreateCurrent(
CreateFacetEntry("test", "sha256:root2", 10));
var cts = new CancellationTokenSource();
cts.Cancel();
// Act & Assert
await Assert.ThrowsAsync<OperationCanceledException>(
() => _detector.DetectDriftAsync(baseline, current, cts.Token));
}
#endregion
}

View File

@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
<PackageReference Include="Microsoft.NET.Test.Sdk" />
<PackageReference Include="xunit.v3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Facet\StellaOps.Facet.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,166 @@
// <copyright file="BuiltInFacets.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Built-in facet definitions for common image components.
/// </summary>
public static class BuiltInFacets
{
/// <summary>
/// Gets all built-in facet definitions.
/// </summary>
public static IReadOnlyList<IFacet> All { get; } = new IFacet[]
{
// OS Package Managers (priority 10)
new FacetDefinition(
"os-packages-dpkg",
"Debian Packages",
FacetCategory.OsPackages,
["/var/lib/dpkg/status", "/var/lib/dpkg/info/**"],
priority: 10),
new FacetDefinition(
"os-packages-rpm",
"RPM Packages",
FacetCategory.OsPackages,
["/var/lib/rpm/**", "/usr/lib/sysimage/rpm/**"],
priority: 10),
new FacetDefinition(
"os-packages-apk",
"Alpine Packages",
FacetCategory.OsPackages,
["/lib/apk/db/**"],
priority: 10),
new FacetDefinition(
"os-packages-pacman",
"Arch Packages",
FacetCategory.OsPackages,
["/var/lib/pacman/**"],
priority: 10),
// Language Interpreters (priority 15 - before lang deps)
new FacetDefinition(
"interpreters-python",
"Python Interpreters",
FacetCategory.Interpreters,
["/usr/bin/python*", "/usr/local/bin/python*"],
priority: 15),
new FacetDefinition(
"interpreters-node",
"Node.js Interpreters",
FacetCategory.Interpreters,
["/usr/bin/node*", "/usr/local/bin/node*"],
priority: 15),
new FacetDefinition(
"interpreters-ruby",
"Ruby Interpreters",
FacetCategory.Interpreters,
["/usr/bin/ruby*", "/usr/local/bin/ruby*"],
priority: 15),
new FacetDefinition(
"interpreters-perl",
"Perl Interpreters",
FacetCategory.Interpreters,
["/usr/bin/perl*", "/usr/local/bin/perl*"],
priority: 15),
// Language Dependencies (priority 20)
new FacetDefinition(
"lang-deps-npm",
"NPM Packages",
FacetCategory.LanguageDependencies,
["**/node_modules/**/package.json", "**/package-lock.json"],
priority: 20),
new FacetDefinition(
"lang-deps-pip",
"Python Packages",
FacetCategory.LanguageDependencies,
["**/site-packages/**/*.dist-info/METADATA", "**/requirements.txt"],
priority: 20),
new FacetDefinition(
"lang-deps-nuget",
"NuGet Packages",
FacetCategory.LanguageDependencies,
["**/*.deps.json", "**/.nuget/**"],
priority: 20),
new FacetDefinition(
"lang-deps-maven",
"Maven Packages",
FacetCategory.LanguageDependencies,
["**/.m2/repository/**/*.pom"],
priority: 20),
new FacetDefinition(
"lang-deps-cargo",
"Cargo Packages",
FacetCategory.LanguageDependencies,
["**/.cargo/registry/**", "**/Cargo.lock"],
priority: 20),
new FacetDefinition(
"lang-deps-go",
"Go Modules",
FacetCategory.LanguageDependencies,
["**/go.sum", "**/go/pkg/mod/**"],
priority: 20),
new FacetDefinition(
"lang-deps-gem",
"Ruby Gems",
FacetCategory.LanguageDependencies,
["**/gems/**/*.gemspec", "**/Gemfile.lock"],
priority: 20),
// Certificates (priority 25)
new FacetDefinition(
"certs-system",
"System Certificates",
FacetCategory.Certificates,
["/etc/ssl/certs/**", "/etc/pki/**", "/usr/share/ca-certificates/**"],
priority: 25),
// Binaries (priority 30)
new FacetDefinition(
"binaries-usr",
"System Binaries",
FacetCategory.Binaries,
["/usr/bin/*", "/usr/sbin/*", "/bin/*", "/sbin/*"],
priority: 30),
new FacetDefinition(
"binaries-lib",
"Shared Libraries",
FacetCategory.Binaries,
["/usr/lib/**/*.so*", "/lib/**/*.so*", "/usr/lib64/**/*.so*", "/lib64/**/*.so*"],
priority: 30),
// Configuration (priority 40)
new FacetDefinition(
"config-etc",
"System Configuration",
FacetCategory.Configuration,
["/etc/**/*.conf", "/etc/**/*.cfg", "/etc/**/*.yaml", "/etc/**/*.yml", "/etc/**/*.json"],
priority: 40),
};
/// <summary>
/// Gets a facet by its ID.
/// </summary>
/// <param name="facetId">The facet identifier.</param>
/// <returns>The facet or null if not found.</returns>
public static IFacet? GetById(string facetId)
=> All.FirstOrDefault(f => f.FacetId == facetId);
/// <summary>
/// Gets all facets in a category.
/// </summary>
/// <param name="category">The category to filter by.</param>
/// <returns>Facets in the category.</returns>
public static IEnumerable<IFacet> GetByCategory(FacetCategory category)
=> All.Where(f => f.Category == category);
/// <summary>
/// Gets facets sorted by priority (lowest first).
/// </summary>
/// <returns>Priority-sorted facets.</returns>
public static IEnumerable<IFacet> GetByPriority()
=> All.OrderBy(f => f.Priority);
}

View File

@@ -0,0 +1,53 @@
// <copyright file="DefaultCryptoHash.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Security.Cryptography;
namespace StellaOps.Facet;
/// <summary>
/// Default implementation of <see cref="ICryptoHash"/> using .NET built-in algorithms.
/// </summary>
public sealed class DefaultCryptoHash : ICryptoHash
{
/// <summary>
/// Gets the singleton instance.
/// </summary>
public static DefaultCryptoHash Instance { get; } = new();
/// <inheritdoc/>
public byte[] ComputeHash(byte[] data, string algorithm)
{
ArgumentNullException.ThrowIfNull(data);
ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);
return algorithm.ToUpperInvariant() switch
{
"SHA256" => SHA256.HashData(data),
"SHA384" => SHA384.HashData(data),
"SHA512" => SHA512.HashData(data),
"SHA1" => SHA1.HashData(data),
"MD5" => MD5.HashData(data),
_ => throw new NotSupportedException($"Hash algorithm '{algorithm}' is not supported")
};
}
/// <inheritdoc/>
public async Task<byte[]> ComputeHashAsync(
Stream stream,
string algorithm,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(stream);
ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);
return algorithm.ToUpperInvariant() switch
{
"SHA256" => await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false),
"SHA384" => await SHA384.HashDataAsync(stream, ct).ConfigureAwait(false),
"SHA512" => await SHA512.HashDataAsync(stream, ct).ConfigureAwait(false),
_ => throw new NotSupportedException($"Hash algorithm '{algorithm}' is not supported for async")
};
}
}

View File

@@ -0,0 +1,46 @@
// <copyright file="FacetCategory.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Categories for grouping facets.
/// </summary>
public enum FacetCategory
{
/// <summary>
/// OS-level package managers (dpkg, rpm, apk, pacman).
/// </summary>
OsPackages,
/// <summary>
/// Language-specific dependencies (npm, pip, nuget, maven, cargo, go).
/// </summary>
LanguageDependencies,
/// <summary>
/// Executable binaries and shared libraries.
/// </summary>
Binaries,
/// <summary>
/// Configuration files (etc, conf, yaml, json).
/// </summary>
Configuration,
/// <summary>
/// SSL/TLS certificates and trust anchors.
/// </summary>
Certificates,
/// <summary>
/// Language interpreters (python, node, ruby, perl).
/// </summary>
Interpreters,
/// <summary>
/// User-defined custom facets.
/// </summary>
Custom
}

View File

@@ -0,0 +1,91 @@
// <copyright file="FacetClassifier.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Classifies files into facets based on selectors.
/// </summary>
public sealed class FacetClassifier
{
private readonly List<(IFacet Facet, GlobMatcher Matcher)> _facetMatchers;
/// <summary>
/// Initializes a new instance of the <see cref="FacetClassifier"/> class.
/// </summary>
/// <param name="facets">Facets to classify against (will be sorted by priority).</param>
public FacetClassifier(IEnumerable<IFacet> facets)
{
ArgumentNullException.ThrowIfNull(facets);
// Sort by priority (lowest first = highest priority)
_facetMatchers = facets
.OrderBy(f => f.Priority)
.Select(f => (f, GlobMatcher.ForFacet(f)))
.ToList();
}
/// <summary>
/// Creates a classifier using built-in facets.
/// </summary>
public static FacetClassifier Default { get; } = new(BuiltInFacets.All);
/// <summary>
/// Classify a file path to a facet.
/// </summary>
/// <param name="path">The file path to classify.</param>
/// <returns>The matching facet or null if no match.</returns>
public IFacet? Classify(string path)
{
ArgumentNullException.ThrowIfNull(path);
// First matching facet wins (ordered by priority)
foreach (var (facet, matcher) in _facetMatchers)
{
if (matcher.IsMatch(path))
{
return facet;
}
}
return null;
}
/// <summary>
/// Classify a file and return the facet ID.
/// </summary>
/// <param name="path">The file path to classify.</param>
/// <returns>The facet ID or null if no match.</returns>
public string? ClassifyToId(string path)
=> Classify(path)?.FacetId;
/// <summary>
/// Classify multiple files efficiently.
/// </summary>
/// <param name="paths">The file paths to classify.</param>
/// <returns>Dictionary from facet ID to matched paths.</returns>
public Dictionary<string, List<string>> ClassifyMany(IEnumerable<string> paths)
{
ArgumentNullException.ThrowIfNull(paths);
var result = new Dictionary<string, List<string>>();
foreach (var path in paths)
{
var facet = Classify(path);
if (facet is not null)
{
if (!result.TryGetValue(facet.FacetId, out var list))
{
list = [];
result[facet.FacetId] = list;
}
list.Add(path);
}
}
return result;
}
}

View File

@@ -0,0 +1,55 @@
// <copyright file="FacetDefinition.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Standard implementation of <see cref="IFacet"/> for defining facets.
/// </summary>
internal sealed class FacetDefinition : IFacet
{
/// <inheritdoc/>
public string FacetId { get; }
/// <inheritdoc/>
public string Name { get; }
/// <inheritdoc/>
public FacetCategory Category { get; }
/// <inheritdoc/>
public IReadOnlyList<string> Selectors { get; }
/// <inheritdoc/>
public int Priority { get; }
/// <summary>
/// Initializes a new instance of the <see cref="FacetDefinition"/> class.
/// </summary>
/// <param name="facetId">Unique identifier for the facet.</param>
/// <param name="name">Human-readable name.</param>
/// <param name="category">Facet category.</param>
/// <param name="selectors">Glob patterns or paths for file matching.</param>
/// <param name="priority">Priority for conflict resolution (lower = higher priority).</param>
public FacetDefinition(
string facetId,
string name,
FacetCategory category,
string[] selectors,
int priority)
{
ArgumentException.ThrowIfNullOrWhiteSpace(facetId);
ArgumentException.ThrowIfNullOrWhiteSpace(name);
ArgumentNullException.ThrowIfNull(selectors);
FacetId = facetId;
Name = name;
Category = category;
Selectors = selectors;
Priority = priority;
}
/// <inheritdoc/>
public override string ToString() => $"{FacetId} ({Name})";
}

View File

@@ -0,0 +1,132 @@
// <copyright file="FacetDrift.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Per-facet drift detection result comparing current state to a baseline seal.
/// </summary>
public sealed record FacetDrift
{
    /// <summary>Gets the identifier of the facet this result describes.</summary>
    public required string FacetId { get; init; }

    /// <summary>Gets the files present now that were absent from the baseline.</summary>
    public required ImmutableArray<FacetFileEntry> Added { get; init; }

    /// <summary>Gets the files present in the baseline that are now gone.</summary>
    public required ImmutableArray<FacetFileEntry> Removed { get; init; }

    /// <summary>Gets the files whose content changed since the baseline.</summary>
    public required ImmutableArray<FacetFileModification> Modified { get; init; }

    /// <summary>
    /// Gets a 0-100 magnitude-of-change score (higher means more drift).
    /// </summary>
    /// <remarks>
    /// Additions, removals, and modifications are weighted together into a
    /// single scalar so callers can rank facets by how much they moved.
    /// </remarks>
    public required decimal DriftScore { get; init; }

    /// <summary>Gets the outcome of evaluating this drift against the facet's quota.</summary>
    public required QuotaVerdict QuotaVerdict { get; init; }

    /// <summary>Gets how many files the baseline facet seal contained.</summary>
    public required int BaselineFileCount { get; init; }

    /// <summary>Gets the sum of added, removed, and modified file counts.</summary>
    public int TotalChanges => Added.Length + Removed.Length + Modified.Length;

    /// <summary>
    /// Gets the churn as a percentage of the baseline file count.
    /// </summary>
    public decimal ChurnPercent
    {
        get
        {
            if (BaselineFileCount > 0)
            {
                return TotalChanges / (decimal)BaselineFileCount * 100;
            }

            // No baseline files: any addition counts as 100% churn.
            return Added.Length > 0 ? 100m : 0m;
        }
    }

    /// <summary>Gets whether at least one file changed in any way.</summary>
    public bool HasDrift => TotalChanges > 0;

    /// <summary>
    /// Creates a result representing an unchanged facet.
    /// </summary>
    public static FacetDrift NoDrift(string facetId, int baselineFileCount) => new()
    {
        FacetId = facetId,
        BaselineFileCount = baselineFileCount,
        Added = [],
        Removed = [],
        Modified = [],
        DriftScore = 0m,
        QuotaVerdict = QuotaVerdict.Ok
    };
}
/// <summary>
/// Image-wide drift report aggregating the per-facet results.
/// </summary>
public sealed record FacetDriftReport
{
    /// <summary>Gets the digest of the image that was analyzed.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>Gets the identifier of the baseline seal the comparison ran against.</summary>
    public required string BaselineSealId { get; init; }

    /// <summary>Gets the UTC timestamp at which the analysis ran.</summary>
    public required DateTimeOffset AnalyzedAt { get; init; }

    /// <summary>Gets the drift result computed for every facet.</summary>
    public required ImmutableArray<FacetDrift> FacetDrifts { get; init; }

    /// <summary>Gets the aggregate verdict, i.e. the most severe per-facet verdict.</summary>
    public required QuotaVerdict OverallVerdict { get; init; }

    /// <summary>Gets the number of changed files summed over every facet.</summary>
    public int TotalChangedFiles => FacetDrifts.Select(d => d.TotalChanges).Sum();

    /// <summary>Gets only those facets that show at least one change.</summary>
    public IEnumerable<FacetDrift> DriftedFacets => FacetDrifts.Where(d => d.HasDrift);

    /// <summary>Gets only those facets whose quota evaluation was not OK.</summary>
    public IEnumerable<FacetDrift> QuotaViolations =>
        FacetDrifts.Where(d =>
            d.QuotaVerdict == QuotaVerdict.Warning ||
            d.QuotaVerdict == QuotaVerdict.Blocked ||
            d.QuotaVerdict == QuotaVerdict.RequiresVex);
}

View File

@@ -0,0 +1,353 @@
// <copyright file="FacetDriftDetector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using DotNet.Globbing;
namespace StellaOps.Facet;
/// <summary>
/// Default implementation of <see cref="IFacetDriftDetector"/>.
/// </summary>
/// <remarks>
/// Compares a baseline <see cref="FacetSeal"/> against either a fresh
/// <see cref="FacetExtractionResult"/> or another seal. Both public overloads
/// share the same core diffing algorithm; they differ only in which image
/// digest is stamped on the resulting report.
/// </remarks>
public sealed class FacetDriftDetector : IFacetDriftDetector
{
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetDriftDetector"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for timestamps; defaults to <see cref="TimeProvider.System"/>.</param>
    public FacetDriftDetector(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc/>
    public Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetExtractionResult current,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(current);

        var drifts = ComputeAllDrifts(baseline, current.Facets, ct);

        // An extraction result carries no digest of its own, so the report is
        // stamped with the baseline's image digest.
        return Task.FromResult(BuildReport(baseline, baseline.ImageDigest, drifts));
    }

    /// <inheritdoc/>
    public Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetSeal current,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(current);

        var drifts = ComputeAllDrifts(baseline, current.Facets, ct);

        // Seal-to-seal comparison: the report describes the *current* image.
        return Task.FromResult(BuildReport(baseline, current.ImageDigest, drifts));
    }

    /// <summary>
    /// Assembles the final report from the per-facet drift results.
    /// </summary>
    private FacetDriftReport BuildReport(
        FacetSeal baseline,
        string imageDigest,
        List<FacetDrift> drifts) => new()
    {
        ImageDigest = imageDigest,
        BaselineSealId = baseline.CombinedMerkleRoot,
        AnalyzedAt = _timeProvider.GetUtcNow(),
        FacetDrifts = [.. drifts],
        OverallVerdict = ComputeOverallVerdict(drifts)
    };

    /// <summary>
    /// Core diffing algorithm shared by both public overloads: matches baseline
    /// facets against current facets and classifies each as unchanged/drifted,
    /// removed, or newly added.
    /// </summary>
    private static List<FacetDrift> ComputeAllDrifts(
        FacetSeal baseline,
        ImmutableArray<FacetEntry> currentFacets,
        CancellationToken ct)
    {
        var drifts = new List<FacetDrift>();

        // Lookup of current facets; entries are consumed as baseline facets
        // are matched, so whatever remains afterwards is brand new.
        var currentFacetLookup = currentFacets.ToDictionary(f => f.FacetId);

        foreach (var baselineFacet in baseline.Facets)
        {
            ct.ThrowIfCancellationRequested();
            var quota = baseline.GetQuota(baselineFacet.FacetId);

            // Remove-with-out both tests membership and consumes the entry in
            // a single dictionary access.
            if (currentFacetLookup.Remove(baselineFacet.FacetId, out var currentFacet))
            {
                // Present in both states - compute file-level drift.
                drifts.Add(ComputeFacetDrift(baselineFacet, currentFacet, quota));
            }
            else
            {
                // Facet disappeared entirely - every baseline file counts as removed.
                drifts.Add(CreateRemovedFacetDrift(baselineFacet, quota));
            }
        }

        // Facets never matched against the baseline are new.
        foreach (var newFacet in currentFacetLookup.Values)
        {
            drifts.Add(CreateNewFacetDrift(newFacet));
        }

        return drifts;
    }

    /// <summary>
    /// Compares two versions of the same facet file-by-file and evaluates the
    /// configured quota against the observed churn.
    /// </summary>
    private static FacetDrift ComputeFacetDrift(
        FacetEntry baseline,
        FacetEntry current,
        FacetQuota quota)
    {
        // Fast path: identical Merkle roots prove identical content.
        if (baseline.MerkleRoot == current.MerkleRoot)
        {
            return FacetDrift.NoDrift(baseline.FacetId, baseline.FileCount);
        }

        // Compact seals omit per-file entries; we can only report that
        // *something* changed. Treat as maximal drift and map the quota action
        // conservatively.
        if (baseline.Files is null || current.Files is null)
        {
            return new FacetDrift
            {
                FacetId = baseline.FacetId,
                Added = [],
                Removed = [],
                Modified = [],
                DriftScore = 100m,
                QuotaVerdict = quota.Action switch
                {
                    QuotaExceededAction.Block => QuotaVerdict.Blocked,
                    QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
                    _ => QuotaVerdict.Warning
                },
                BaselineFileCount = baseline.FileCount
            };
        }

        // Files matching an allowlist glob are exempt from drift accounting.
        var allowlistGlobs = quota.AllowlistGlobs
            .Select(p => Glob.Parse(p))
            .ToList();
        bool IsAllowlisted(string path) => allowlistGlobs.Any(g => g.IsMatch(path));

        var baselineFiles = baseline.Files.Value.ToDictionary(f => f.Path);
        var currentFiles = current.Files.Value.ToDictionary(f => f.Path);

        var added = new List<FacetFileEntry>();
        var removed = new List<FacetFileEntry>();
        var modified = new List<FacetFileModification>();

        // Pass 1: walk current files to find additions and modifications.
        foreach (var (path, currentFile) in currentFiles)
        {
            if (IsAllowlisted(path))
            {
                continue;
            }

            if (baselineFiles.TryGetValue(path, out var baselineFile))
            {
                // Present in both - a digest mismatch means the file changed.
                if (baselineFile.Digest != currentFile.Digest)
                {
                    modified.Add(new FacetFileModification(
                        path,
                        baselineFile.Digest,
                        currentFile.Digest,
                        baselineFile.SizeBytes,
                        currentFile.SizeBytes));
                }
            }
            else
            {
                added.Add(currentFile);
            }
        }

        // Pass 2: walk baseline files to find removals.
        foreach (var (path, baselineFile) in baselineFiles)
        {
            if (IsAllowlisted(path))
            {
                continue;
            }

            if (!currentFiles.ContainsKey(path))
            {
                removed.Add(baselineFile);
            }
        }

        var totalChanges = added.Count + removed.Count + modified.Count;
        var driftScore = ComputeDriftScore(
            added.Count,
            removed.Count,
            modified.Count,
            baseline.FileCount);
        var churnPercent = baseline.FileCount > 0
            ? totalChanges / (decimal)baseline.FileCount * 100
            : added.Count > 0 ? 100m : 0m;
        var verdict = EvaluateQuota(quota, churnPercent, totalChanges);

        return new FacetDrift
        {
            FacetId = baseline.FacetId,
            Added = [.. added],
            Removed = [.. removed],
            Modified = [.. modified],
            DriftScore = driftScore,
            QuotaVerdict = verdict,
            BaselineFileCount = baseline.FileCount
        };
    }

    /// <summary>
    /// Builds the drift result for a facet that vanished entirely: every
    /// baseline file is reported as removed and drift is maximal.
    /// </summary>
    private static FacetDrift CreateRemovedFacetDrift(FacetEntry baseline, FacetQuota quota)
    {
        var removedFiles = baseline.Files?.ToImmutableArray() ?? [];
        var verdict = quota.Action switch
        {
            QuotaExceededAction.Block => QuotaVerdict.Blocked,
            QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
            _ => QuotaVerdict.Warning
        };
        return new FacetDrift
        {
            FacetId = baseline.FacetId,
            Added = [],
            Removed = removedFiles,
            Modified = [],
            DriftScore = 100m,
            QuotaVerdict = verdict,
            BaselineFileCount = baseline.FileCount
        };
    }

    /// <summary>
    /// Builds the drift result for a facet absent from the baseline: all
    /// files are additions and the facet gets a Warning verdict by default.
    /// </summary>
    private static FacetDrift CreateNewFacetDrift(FacetEntry newFacet)
    {
        var addedFiles = newFacet.Files?.ToImmutableArray() ?? [];
        return new FacetDrift
        {
            FacetId = newFacet.FacetId,
            Added = addedFiles,
            Removed = [],
            Modified = [],
            DriftScore = 100m, // All new = max drift from baseline perspective
            QuotaVerdict = QuotaVerdict.Warning, // New facets get warning by default
            BaselineFileCount = 0
        };
    }

    /// <summary>
    /// Computes the 0-100 drift score. Additions and removals weigh 1.0,
    /// modifications weigh 0.5; the result is capped at 100.
    /// </summary>
    private static decimal ComputeDriftScore(
        int added,
        int removed,
        int modified,
        int baselineCount)
    {
        if (baselineCount == 0)
        {
            return added > 0 ? 100m : 0m;
        }

        var weightedChanges = added + removed + (modified * 0.5m);
        var score = weightedChanges / baselineCount * 100;
        return Math.Min(100m, score);
    }

    /// <summary>
    /// Evaluates churn against quota thresholds and maps the configured
    /// action to a verdict when either threshold is exceeded.
    /// </summary>
    private static QuotaVerdict EvaluateQuota(FacetQuota quota, decimal churnPercent, int totalChanges)
    {
        var exceeds = churnPercent > quota.MaxChurnPercent ||
                      totalChanges > quota.MaxChangedFiles;
        if (!exceeds)
        {
            return QuotaVerdict.Ok;
        }

        return quota.Action switch
        {
            QuotaExceededAction.Block => QuotaVerdict.Blocked,
            QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
            _ => QuotaVerdict.Warning
        };
    }

    /// <summary>
    /// Returns the most severe verdict present across all facet drifts.
    /// </summary>
    private static QuotaVerdict ComputeOverallVerdict(List<FacetDrift> drifts)
    {
        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.Blocked))
        {
            return QuotaVerdict.Blocked;
        }

        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.RequiresVex))
        {
            return QuotaVerdict.RequiresVex;
        }

        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.Warning))
        {
            return QuotaVerdict.Warning;
        }

        return QuotaVerdict.Ok;
    }
}

View File

@@ -0,0 +1,59 @@
// <copyright file="FacetEntry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// A single sealed facet inside a <see cref="FacetSeal"/>.
/// </summary>
public sealed record FacetEntry
{
    /// <summary>Gets the stable facet identifier, e.g. "os-packages-dpkg" or "lang-deps-npm".</summary>
    public required string FacetId { get; init; }

    /// <summary>Gets the display name shown to humans.</summary>
    public required string Name { get; init; }

    /// <summary>Gets the category used to group related facets.</summary>
    public required FacetCategory Category { get; init; }

    /// <summary>Gets the selector patterns that assigned files to this facet.</summary>
    public required ImmutableArray<string> Selectors { get; init; }

    /// <summary>
    /// Gets the Merkle root over every file in the facet.
    /// </summary>
    /// <remarks>
    /// Format: "sha256:{hex}" computed from sorted file entries.
    /// </remarks>
    public required string MerkleRoot { get; init; }

    /// <summary>Gets how many files the facet contains.</summary>
    public required int FileCount { get; init; }

    /// <summary>Gets the cumulative size of all files, in bytes.</summary>
    public required long TotalBytes { get; init; }

    /// <summary>
    /// Gets the per-file entries kept for detailed audit.
    /// </summary>
    /// <remarks>
    /// Null for compact seals that persist Merkle roots only.
    /// </remarks>
    public ImmutableArray<FacetFileEntry>? Files { get; init; }
}

View File

@@ -0,0 +1,78 @@
// <copyright file="FacetExtractionOptions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Options controlling how facets are extracted from an image.
/// </summary>
public sealed record FacetExtractionOptions
{
    /// <summary>
    /// Gets the facets to extract; an empty array means "use all built-ins".
    /// </summary>
    public ImmutableArray<IFacet> Facets { get; init; } = [];

    /// <summary>
    /// Gets whether individual file entries are kept in the result.
    /// </summary>
    /// <remarks>
    /// False produces a compact result (Merkle roots only); true preserves
    /// every file detail for audit purposes.
    /// </remarks>
    public bool IncludeFileDetails { get; init; } = true;

    /// <summary>
    /// Gets whether a Merkle proof is computed per file.
    /// </summary>
    /// <remarks>
    /// Proofs allow each file to be verified individually against the facet root.
    /// </remarks>
    public bool ComputeMerkleProofs { get; init; }

    /// <summary>
    /// Gets glob patterns describing files to leave out of extraction.
    /// </summary>
    public ImmutableArray<string> ExcludePatterns { get; init; } = [];

    /// <summary>
    /// Gets the hash algorithm name (default: SHA256).
    /// </summary>
    public string HashAlgorithm { get; init; } = "SHA256";

    /// <summary>
    /// Gets whether symlinks are followed during traversal.
    /// </summary>
    public bool FollowSymlinks { get; init; }

    /// <summary>
    /// Gets the size ceiling for hashing; larger files are skipped with a placeholder.
    /// </summary>
    public long MaxFileSizeBytes { get; init; } = 100 * 1024 * 1024; // 100MB

    /// <summary>
    /// Gets the default option set.
    /// </summary>
    public static FacetExtractionOptions Default { get; } = new();

    /// <summary>
    /// Gets a preset for compact sealing: roots only, no file details or proofs.
    /// </summary>
    public static FacetExtractionOptions Compact { get; } = new()
    {
        IncludeFileDetails = false,
        ComputeMerkleProofs = false
    };

    /// <summary>
    /// Gets a preset for full audit: every detail plus per-file proofs.
    /// </summary>
    public static FacetExtractionOptions FullAudit { get; } = new()
    {
        IncludeFileDetails = true,
        ComputeMerkleProofs = true
    };
}

View File

@@ -0,0 +1,86 @@
// <copyright file="FacetExtractionResult.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Outcome of extracting facets from an image.
/// </summary>
public sealed record FacetExtractionResult
{
    /// <summary>Gets the facet entries that were extracted.</summary>
    public required ImmutableArray<FacetEntry> Facets { get; init; }

    /// <summary>Gets the files that matched no facet selector.</summary>
    public required ImmutableArray<FacetFileEntry> UnmatchedFiles { get; init; }

    /// <summary>Gets the files skipped during extraction (too large, unreadable, etc.).</summary>
    public required ImmutableArray<SkippedFile> SkippedFiles { get; init; }

    /// <summary>Gets the Merkle root combining every facet root.</summary>
    public required string CombinedMerkleRoot { get; init; }

    /// <summary>Gets statistics gathered while extracting.</summary>
    public required FacetExtractionStats Stats { get; init; }

    /// <summary>Gets non-fatal warnings raised during extraction.</summary>
    public ImmutableArray<string> Warnings { get; init; } = [];
}
/// <summary>
/// A file the extractor decided not to process.
/// </summary>
/// <param name="Path">Path of the skipped file.</param>
/// <param name="Reason">Human-readable explanation of the skip.</param>
public sealed record SkippedFile(string Path, string Reason);
/// <summary>
/// Counters and timing gathered during facet extraction.
/// </summary>
public sealed record FacetExtractionStats
{
    /// <summary>Gets how many files were processed in total.</summary>
    public required int TotalFilesProcessed { get; init; }

    /// <summary>Gets the cumulative byte count over all files.</summary>
    public required long TotalBytes { get; init; }

    /// <summary>Gets how many files were assigned to a facet.</summary>
    public required int FilesMatched { get; init; }

    /// <summary>Gets how many files matched no facet.</summary>
    public required int FilesUnmatched { get; init; }

    /// <summary>Gets how many files were skipped.</summary>
    public required int FilesSkipped { get; init; }

    /// <summary>Gets how long the extraction took.</summary>
    public required TimeSpan Duration { get; init; }
}

View File

@@ -0,0 +1,18 @@
// <copyright file="FacetFileEntry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// A single file tracked within a facet.
/// </summary>
/// <param name="Path">Path of the file inside the image.</param>
/// <param name="Digest">Content hash formatted as "algorithm:hex" (e.g. "sha256:abc...").</param>
/// <param name="SizeBytes">Size of the file in bytes.</param>
/// <param name="ModifiedAt">Last-modified timestamp when known, otherwise null.</param>
public sealed record FacetFileEntry(
    string Path,
    string Digest,
    long SizeBytes,
    DateTimeOffset? ModifiedAt);

View File

@@ -0,0 +1,26 @@
// <copyright file="FacetFileModification.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// A file whose content differs between the baseline and the current state.
/// </summary>
/// <param name="Path">Path of the file inside the image.</param>
/// <param name="PreviousDigest">Content hash recorded in the baseline.</param>
/// <param name="CurrentDigest">Content hash observed now.</param>
/// <param name="PreviousSizeBytes">Size recorded in the baseline.</param>
/// <param name="CurrentSizeBytes">Size observed now.</param>
public sealed record FacetFileModification(
    string Path,
    string PreviousDigest,
    string CurrentDigest,
    long PreviousSizeBytes,
    long CurrentSizeBytes)
{
    /// <summary>
    /// Gets the byte delta between current and previous size
    /// (positive when the file grew, negative when it shrank).
    /// </summary>
    public long SizeDelta
    {
        get { return CurrentSizeBytes - PreviousSizeBytes; }
    }
}

View File

@@ -0,0 +1,194 @@
// <copyright file="FacetMerkleTree.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using System.Text;
namespace StellaOps.Facet;
/// <summary>
/// Computes deterministic Merkle roots for facet file sets.
/// </summary>
/// <remarks>
/// <para>
/// Leaf nodes are computed from: path | digest | size (sorted by path).
/// Internal nodes are computed by concatenating and hashing child pairs.
/// </para>
/// <para>
/// An odd node at any level is promoted unchanged to the next level
/// (no self-pairing), so roots are stable for any file count.
/// </para>
/// </remarks>
public sealed class FacetMerkleTree
{
    // Hash primitive used for every leaf and internal node.
    private readonly ICryptoHash _cryptoHash;

    // Algorithm name passed to the hash primitive; also lower-cased as the
    // "{alg}:" prefix on formatted roots.
    private readonly string _algorithm;

    /// <summary>
    /// Empty tree root constant (SHA-256 of empty string).
    /// </summary>
    /// <remarks>
    /// NOTE(review): this constant is always SHA-256-prefixed, even when the
    /// instance was constructed with a different algorithm - confirm that is
    /// intended for non-SHA256 configurations.
    /// </remarks>
    public const string EmptyTreeRoot = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetMerkleTree"/> class.
    /// </summary>
    /// <param name="cryptoHash">Cryptographic hash implementation.</param>
    /// <param name="algorithm">Hash algorithm to use (default: SHA256).</param>
    public FacetMerkleTree(ICryptoHash? cryptoHash = null, string algorithm = "SHA256")
    {
        _cryptoHash = cryptoHash ?? DefaultCryptoHash.Instance;
        _algorithm = algorithm;
    }

    /// <summary>
    /// Compute Merkle root from file entries.
    /// </summary>
    /// <param name="files">Files to include in the tree.</param>
    /// <returns>Merkle root in "sha256:{hex}" format.</returns>
    public string ComputeRoot(IEnumerable<FacetFileEntry> files)
    {
        ArgumentNullException.ThrowIfNull(files);
        // Sort files by path for determinism (ordinal comparison)
        var sortedFiles = files
            .OrderBy(f => f.Path, StringComparer.Ordinal)
            .ToList();
        if (sortedFiles.Count == 0)
        {
            return EmptyTreeRoot;
        }
        // Build leaf nodes
        var leaves = sortedFiles
            .Select(ComputeLeafHash)
            .ToList();
        // Build tree and return root
        return ComputeMerkleRootFromNodes(leaves);
    }

    /// <summary>
    /// Compute combined root from multiple facet entries.
    /// </summary>
    /// <param name="facets">Facet entries with Merkle roots.</param>
    /// <returns>Combined Merkle root.</returns>
    /// <remarks>
    /// Facet roots are ordered by FacetId (ordinal) before tree construction,
    /// so the combined root is independent of input ordering.
    /// NOTE(review): each facet root's algorithm prefix is stripped without
    /// checking it matches this instance's algorithm - mixed-algorithm inputs
    /// would be silently combined; confirm callers guarantee homogeneity.
    /// </remarks>
    public string ComputeCombinedRoot(IEnumerable<FacetEntry> facets)
    {
        ArgumentNullException.ThrowIfNull(facets);
        var facetRoots = facets
            .OrderBy(f => f.FacetId, StringComparer.Ordinal)
            .Select(f => HexToBytes(StripAlgorithmPrefix(f.MerkleRoot)))
            .ToList();
        if (facetRoots.Count == 0)
        {
            return EmptyTreeRoot;
        }
        return ComputeMerkleRootFromNodes(facetRoots);
    }

    /// <summary>
    /// Verify that a file is included in a Merkle root.
    /// </summary>
    /// <param name="file">The file to verify.</param>
    /// <param name="proof">The Merkle proof (sibling hashes).</param>
    /// <param name="expectedRoot">The expected Merkle root.</param>
    /// <returns>True if the proof is valid.</returns>
    /// <remarks>
    /// NOTE(review): verification pairs each step in sorted order (smaller
    /// hash first), but <see cref="ComputeMerkleRootFromNodes"/> hashes pairs
    /// in positional (left/right) order. These conventions only agree if the
    /// proof generator emits siblings assuming the sorted-pair scheme -
    /// confirm against the proof-generation code, otherwise valid proofs may
    /// fail to verify whenever a left sibling hashes greater than its right.
    /// </remarks>
    public bool VerifyProof(FacetFileEntry file, IReadOnlyList<byte[]> proof, string expectedRoot)
    {
        ArgumentNullException.ThrowIfNull(file);
        ArgumentNullException.ThrowIfNull(proof);
        var currentHash = ComputeLeafHash(file);
        foreach (var sibling in proof)
        {
            // Determine ordering: smaller hash comes first
            var comparison = CompareHashes(currentHash, sibling);
            currentHash = comparison <= 0
                ? HashPair(currentHash, sibling)
                : HashPair(sibling, currentHash);
        }
        var computedRoot = FormatRoot(currentHash);
        return string.Equals(computedRoot, expectedRoot, StringComparison.OrdinalIgnoreCase);
    }

    // Hashes one file into its canonical leaf digest.
    private byte[] ComputeLeafHash(FacetFileEntry file)
    {
        // Canonical leaf format: "path|digest|size"
        // Using InvariantCulture for size formatting
        var canonical = string.Create(
            CultureInfo.InvariantCulture,
            $"{file.Path}|{file.Digest}|{file.SizeBytes}");
        return _cryptoHash.ComputeHash(Encoding.UTF8.GetBytes(canonical), _algorithm);
    }

    // Collapses a level of nodes pairwise until a single root remains.
    // Pairs are hashed in positional order; see the VerifyProof remark about
    // the ordering convention mismatch.
    private string ComputeMerkleRootFromNodes(List<byte[]> nodes)
    {
        while (nodes.Count > 1)
        {
            var nextLevel = new List<byte[]>();
            for (var i = 0; i < nodes.Count; i += 2)
            {
                if (i + 1 < nodes.Count)
                {
                    // Hash pair of nodes
                    nextLevel.Add(HashPair(nodes[i], nodes[i + 1]));
                }
                else
                {
                    // Odd node: promote as-is (or optionally hash with itself)
                    nextLevel.Add(nodes[i]);
                }
            }
            nodes = nextLevel;
        }
        return FormatRoot(nodes[0]);
    }

    // Hashes the byte-concatenation of two child digests (left first).
    private byte[] HashPair(byte[] left, byte[] right)
    {
        var combined = new byte[left.Length + right.Length];
        left.CopyTo(combined, 0);
        right.CopyTo(combined, left.Length);
        return _cryptoHash.ComputeHash(combined, _algorithm);
    }

    // Lexicographic byte-wise comparison; shorter array wins ties on prefix.
    private static int CompareHashes(byte[] a, byte[] b)
    {
        var minLength = Math.Min(a.Length, b.Length);
        for (var i = 0; i < minLength; i++)
        {
            var cmp = a[i].CompareTo(b[i]);
            if (cmp != 0)
            {
                return cmp;
            }
        }
        return a.Length.CompareTo(b.Length);
    }

    // Renders a raw digest as "{algorithm}:{lowercase hex}".
    private string FormatRoot(byte[] hash)
    {
        var algPrefix = _algorithm.ToLowerInvariant();
        var hex = Convert.ToHexString(hash).ToLowerInvariant();
        return $"{algPrefix}:{hex}";
    }

    // Drops a leading "{algorithm}:" prefix if present; returns input unchanged otherwise.
    private static string StripAlgorithmPrefix(string digest)
    {
        var colonIndex = digest.IndexOf(':', StringComparison.Ordinal);
        return colonIndex >= 0 ? digest[(colonIndex + 1)..] : digest;
    }

    // Decodes a hex string to raw bytes (throws FormatException on invalid hex).
    private static byte[] HexToBytes(string hex)
    {
        return Convert.FromHexString(hex);
    }
}

View File

@@ -0,0 +1,65 @@
// <copyright file="FacetQuota.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Drift-tolerance configuration for a facet.
/// </summary>
public sealed record FacetQuota
{
    /// <summary>
    /// Gets or initializes the highest acceptable churn percentage (0-100).
    /// </summary>
    /// <remarks>
    /// Churn = (added + removed + modified files) / baseline file count * 100.
    /// </remarks>
    public decimal MaxChurnPercent { get; init; } = 10m;

    /// <summary>
    /// Gets or initializes how many files may change before an alert fires.
    /// </summary>
    public int MaxChangedFiles { get; init; } = 50;

    /// <summary>
    /// Gets or initializes glob patterns for files exempt from enforcement.
    /// </summary>
    /// <remarks>
    /// Matching files are ignored by drift calculations - handy for expected
    /// churn such as logs, timestamps, or caches.
    /// </remarks>
    public ImmutableArray<string> AllowlistGlobs { get; init; } = [];

    /// <summary>
    /// Gets or initializes what happens when the quota is exceeded.
    /// </summary>
    public QuotaExceededAction Action { get; init; } = QuotaExceededAction.Warn;

    /// <summary>
    /// Gets the default quota configuration.
    /// </summary>
    public static FacetQuota Default { get; } = new();

    /// <summary>
    /// Gets a strict preset for high-security binaries: 5% churn,
    /// 10 files, blocking enforcement.
    /// </summary>
    public static FacetQuota Strict { get; } = new()
    {
        MaxChurnPercent = 5m,
        MaxChangedFiles = 10,
        Action = QuotaExceededAction.Block
    };

    /// <summary>
    /// Gets a permissive preset for frequently-updated dependencies:
    /// 25% churn, 200 files, warn-only enforcement.
    /// </summary>
    public static FacetQuota Permissive { get; } = new()
    {
        MaxChurnPercent = 25m,
        MaxChangedFiles = 200,
        Action = QuotaExceededAction.Warn
    };
}

View File

@@ -0,0 +1,114 @@
// <copyright file="FacetSeal.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Sealed manifest of facets for an image at a point in time.
/// </summary>
/// <remarks>
/// <para>
/// Captures the cryptographic state of every facet in an image so that later
/// scans can detect drift and enforce quotas against it.
/// </para>
/// <para>
/// The seal may optionally carry a DSSE signature for authenticity checks.
/// </para>
/// </remarks>
public sealed record FacetSeal
{
    /// <summary>
    /// Current schema version.
    /// </summary>
    public const string CurrentSchemaVersion = "1.0.0";

    /// <summary>Gets the schema version, kept for forward compatibility.</summary>
    public string SchemaVersion { get; init; } = CurrentSchemaVersion;

    /// <summary>
    /// Gets the image digest this seal applies to,
    /// formatted "sha256:{hex}" or "sha512:{hex}".
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>Gets the creation timestamp of the seal.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Gets an optional in-toto provenance (build attestation) reference.</summary>
    public string? BuildAttestationRef { get; init; }

    /// <summary>Gets the individual facet seals.</summary>
    public required ImmutableArray<FacetEntry> Facets { get; init; }

    /// <summary>
    /// Gets the per-facet quota configuration, keyed by facet ID.
    /// </summary>
    /// <remarks>
    /// Facets with no explicit entry fall back to default quota values.
    /// </remarks>
    public ImmutableDictionary<string, FacetQuota>? Quotas { get; init; }

    /// <summary>
    /// Gets the Merkle root combining every facet root.
    /// </summary>
    /// <remarks>
    /// Computed over facet roots sorted by FacetId, giving a single value
    /// for whole-seal integrity verification.
    /// </remarks>
    public required string CombinedMerkleRoot { get; init; }

    /// <summary>
    /// Gets the base64-encoded DSSE envelope when the seal is signed, else null.
    /// </summary>
    public string? Signature { get; init; }

    /// <summary>Gets the identifier of the signing key, if signed.</summary>
    public string? SigningKeyId { get; init; }

    /// <summary>Gets whether a signature is present.</summary>
    public bool IsSigned => !string.IsNullOrEmpty(Signature);

    /// <summary>
    /// Gets the quota for a specific facet, or default if not configured.
    /// </summary>
    /// <param name="facetId">The facet identifier.</param>
    /// <returns>The configured quota or <see cref="FacetQuota.Default"/>.</returns>
    public FacetQuota GetQuota(string facetId)
    {
        if (Quotas is null)
        {
            return FacetQuota.Default;
        }

        return Quotas.TryGetValue(facetId, out var quota)
            ? quota
            : FacetQuota.Default;
    }

    /// <summary>
    /// Gets a facet entry by ID.
    /// </summary>
    /// <param name="facetId">The facet identifier.</param>
    /// <returns>The facet entry or null if not found.</returns>
    public FacetEntry? GetFacet(string facetId)
    {
        foreach (var facet in Facets)
        {
            if (facet.FacetId == facetId)
            {
                return facet;
            }
        }

        return null;
    }
}

View File

@@ -0,0 +1,121 @@
// <copyright file="FacetSealer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
namespace StellaOps.Facet;
/// <summary>
/// Creates <see cref="FacetSeal"/> instances from extraction results.
/// </summary>
public sealed class FacetSealer
{
    private readonly TimeProvider _timeProvider;
    private readonly FacetMerkleTree _merkleTree;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetSealer"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for timestamps; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <param name="cryptoHash">Hash implementation passed through to the Merkle tree.</param>
    /// <param name="algorithm">Hash algorithm name (default: SHA256).</param>
    public FacetSealer(
        TimeProvider? timeProvider = null,
        ICryptoHash? cryptoHash = null,
        string algorithm = "SHA256")
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
        _merkleTree = new FacetMerkleTree(cryptoHash, algorithm);
    }

    /// <summary>
    /// Create a seal from an extraction result.
    /// </summary>
    /// <param name="imageDigest">The image digest this seal applies to.</param>
    /// <param name="extraction">The extraction result.</param>
    /// <param name="quotas">Optional per-facet quota configuration.</param>
    /// <param name="buildAttestationRef">Optional build attestation reference.</param>
    /// <returns>The created seal.</returns>
    public FacetSeal CreateSeal(
        string imageDigest,
        FacetExtractionResult extraction,
        ImmutableDictionary<string, FacetQuota>? quotas = null,
        string? buildAttestationRef = null)
    {
        // Preserve the original validation order of the public contract:
        // imageDigest is checked before extraction.
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
        ArgumentNullException.ThrowIfNull(extraction);

        // Delegate to the entry-based overload so the sealing logic lives in
        // exactly one place.
        return CreateSeal(imageDigest, extraction.Facets, quotas, buildAttestationRef);
    }

    /// <summary>
    /// Create a seal from facet entries directly.
    /// </summary>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="facets">The facet entries.</param>
    /// <param name="quotas">Optional quotas.</param>
    /// <param name="buildAttestationRef">Optional attestation ref.</param>
    /// <returns>The created seal.</returns>
    public FacetSeal CreateSeal(
        string imageDigest,
        ImmutableArray<FacetEntry> facets,
        ImmutableDictionary<string, FacetQuota>? quotas = null,
        string? buildAttestationRef = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);

        // The combined root covers all facet roots (sorted by FacetId inside
        // the tree), giving a single integrity value for the whole seal.
        var combinedRoot = _merkleTree.ComputeCombinedRoot(facets);

        return new FacetSeal
        {
            ImageDigest = imageDigest,
            CreatedAt = _timeProvider.GetUtcNow(),
            BuildAttestationRef = buildAttestationRef,
            Facets = facets,
            Quotas = quotas,
            CombinedMerkleRoot = combinedRoot
        };
    }

    /// <summary>
    /// Create a facet entry from file entries.
    /// </summary>
    /// <param name="facet">The facet definition.</param>
    /// <param name="files">Files belonging to this facet.</param>
    /// <param name="includeFileDetails">Whether to include individual file entries (false yields a compact entry).</param>
    /// <returns>The facet entry.</returns>
    public FacetEntry CreateFacetEntry(
        IFacet facet,
        IReadOnlyList<FacetFileEntry> files,
        bool includeFileDetails = true)
    {
        ArgumentNullException.ThrowIfNull(facet);
        ArgumentNullException.ThrowIfNull(files);

        var merkleRoot = _merkleTree.ComputeRoot(files);
        var totalBytes = files.Sum(f => f.SizeBytes);

        return new FacetEntry
        {
            FacetId = facet.FacetId,
            Name = facet.Name,
            Category = facet.Category,
            Selectors = [.. facet.Selectors],
            MerkleRoot = merkleRoot,
            FileCount = files.Count,
            TotalBytes = totalBytes,
            // Compact seals drop per-file detail and keep only the root.
            Files = includeFileDetails ? [.. files] : null
        };
    }
}

View File

@@ -0,0 +1,137 @@
// <copyright file="FacetServiceCollectionExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Facet;
/// <summary>
/// Dependency-injection registration helpers for the facet subsystem.
/// </summary>
public static class FacetServiceCollectionExtensions
{
    /// <summary>
    /// Register the default facet services (crypto hash, Merkle tree, classifier,
    /// sealer, drift detector) when they are not already registered.
    /// </summary>
    /// <param name="services">The service collection to populate.</param>
    /// <returns>The same collection, for chaining.</returns>
    public static IServiceCollection AddFacetServices(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Default hash implementation.
        services.TryAddSingleton<ICryptoHash>(DefaultCryptoHash.Instance);

        // Merkle tree built on whichever ICryptoHash is registered.
        services.TryAddSingleton(provider =>
            new FacetMerkleTree(provider.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance));

        // Classifier with the built-in facet set.
        services.TryAddSingleton(_ => FacetClassifier.Default);

        // Sealer resolves its clock and hash lazily so test doubles can be injected.
        services.TryAddSingleton(provider =>
        {
            var clock = provider.GetService<TimeProvider>() ?? TimeProvider.System;
            var hash = provider.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetSealer(clock, hash);
        });

        // Drift detector.
        services.TryAddSingleton<IFacetDriftDetector>(provider =>
            new FacetDriftDetector(provider.GetService<TimeProvider>() ?? TimeProvider.System));

        return services;
    }

    /// <summary>
    /// Register facet services after applying caller-supplied configuration.
    /// </summary>
    /// <param name="services">The service collection to populate.</param>
    /// <param name="configure">Callback that mutates a fresh <see cref="FacetServiceOptions"/>.</param>
    /// <returns>The same collection, for chaining.</returns>
    public static IServiceCollection AddFacetServices(
        this IServiceCollection services,
        Action<FacetServiceOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        var options = new FacetServiceOptions();
        configure(options);

        // A caller-supplied hash wins over the default; otherwise register the
        // default only if nothing else is present.
        if (options.CryptoHash is not null)
        {
            services.AddSingleton(options.CryptoHash);
        }
        else
        {
            services.TryAddSingleton<ICryptoHash>(DefaultCryptoHash.Instance);
        }

        // Custom facets extend (never replace) the built-in set.
        if (options.CustomFacets is { Count: > 0 })
        {
            var allFacets = BuiltInFacets.All.Concat(options.CustomFacets).ToList();
            services.AddSingleton(new FacetClassifier(allFacets));
        }
        else
        {
            services.TryAddSingleton(_ => FacetClassifier.Default);
        }

        // Merkle tree honoring the configured hash algorithm.
        services.TryAddSingleton(provider =>
        {
            var hash = provider.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetMerkleTree(hash, options.HashAlgorithm);
        });

        // Sealer honoring the configured hash algorithm.
        services.TryAddSingleton(provider =>
        {
            var clock = provider.GetService<TimeProvider>() ?? TimeProvider.System;
            var hash = provider.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetSealer(clock, hash, options.HashAlgorithm);
        });

        // Drift detector.
        services.TryAddSingleton<IFacetDriftDetector>(provider =>
            new FacetDriftDetector(provider.GetService<TimeProvider>() ?? TimeProvider.System));

        return services;
    }
}
/// <summary>
/// Tunable settings consumed by the configurable
/// <c>AddFacetServices</c> overload.
/// </summary>
public sealed class FacetServiceOptions
{
    /// <summary>
    /// Gets or sets the hash algorithm used for Merkle roots and sealing.
    /// Defaults to "SHA256".
    /// </summary>
    public string HashAlgorithm { get; set; } = "SHA256";

    /// <summary>
    /// Gets or sets additional facet definitions appended to the built-in set.
    /// Leave null (or empty) to use only the built-ins.
    /// </summary>
    public List<IFacet>? CustomFacets { get; set; }

    /// <summary>
    /// Gets or sets an alternative <see cref="ICryptoHash"/> implementation.
    /// Leave null to use the library default.
    /// </summary>
    public ICryptoHash? CryptoHash { get; set; }
}

View File

@@ -0,0 +1,70 @@
// <copyright file="GlobMatcher.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using DotNet.Globbing;
namespace StellaOps.Facet;
/// <summary>
/// Matches file paths against a fixed set of glob patterns.
/// </summary>
public sealed class GlobMatcher
{
    private readonly List<Glob> _compiled;

    /// <summary>
    /// Initializes a new instance of the <see cref="GlobMatcher"/> class.
    /// </summary>
    /// <param name="patterns">Glob patterns to compile; backslashes are normalized to forward slashes.</param>
    public GlobMatcher(IEnumerable<string> patterns)
    {
        ArgumentNullException.ThrowIfNull(patterns);

        // NOTE(review): paths are rooted with a leading "/" before matching, but
        // patterns are not — a relative pattern like "etc/**" only matches via a
        // leading "**"; confirm facet selectors are always rooted.
        _compiled = [];
        foreach (var pattern in patterns)
        {
            _compiled.Add(Glob.Parse(pattern.Replace('\\', '/')));
        }
    }

    /// <summary>
    /// Determine whether the given path matches at least one pattern.
    /// </summary>
    /// <param name="path">Path to test; normalized to a rooted, Unix-style form.</param>
    /// <returns>True when any compiled pattern matches.</returns>
    public bool IsMatch(string path)
    {
        ArgumentNullException.ThrowIfNull(path);

        var candidate = path.Replace('\\', '/');
        if (!candidate.StartsWith('/'))
        {
            candidate = "/" + candidate;
        }

        foreach (var glob in _compiled)
        {
            if (glob.IsMatch(candidate))
            {
                return true;
            }
        }

        return false;
    }

    /// <summary>
    /// Create a matcher covering a single facet's selectors.
    /// </summary>
    /// <param name="facet">The facet whose selectors should be compiled.</param>
    /// <returns>A matcher over the facet's selectors.</returns>
    public static GlobMatcher ForFacet(IFacet facet)
    {
        ArgumentNullException.ThrowIfNull(facet);
        return new GlobMatcher(facet.Selectors);
    }
}

View File

@@ -0,0 +1,32 @@
// <copyright file="ICryptoHash.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Abstraction for cryptographic hash operations.
/// </summary>
/// <remarks>
/// This interface allows the facet library to be used with different
/// cryptographic implementations (e.g., built-in .NET, BouncyCastle, HSM).
/// The DI helpers register implementations as singletons, so they should be
/// safe for concurrent use — confirm for custom implementations.
/// </remarks>
public interface ICryptoHash
{
    /// <summary>
    /// Compute the hash of an in-memory buffer.
    /// </summary>
    /// <param name="data">Data to hash.</param>
    /// <param name="algorithm">Algorithm name (e.g., "SHA256", "SHA512").</param>
    /// <returns>The raw hash bytes.</returns>
    byte[] ComputeHash(byte[] data, string algorithm);

    /// <summary>
    /// Compute the hash of a stream.
    /// </summary>
    /// <param name="stream">Stream to hash.</param>
    /// <param name="algorithm">Algorithm name (e.g., "SHA256", "SHA512").</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The raw hash bytes.</returns>
    Task<byte[]> ComputeHashAsync(Stream stream, string algorithm, CancellationToken ct = default);
}

View File

@@ -0,0 +1,60 @@
// <copyright file="IFacet.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Represents a trackable slice of an image.
/// </summary>
/// <remarks>
/// <para>
/// A facet defines a logical grouping of files within a container image
/// that can be tracked independently for sealing and drift detection.
/// </para>
/// <para>
/// Examples of facets: OS packages, language dependencies, binaries, config files.
/// </para>
/// </remarks>
public interface IFacet
{
    /// <summary>
    /// Gets the unique identifier for this facet type.
    /// </summary>
    /// <remarks>
    /// Format: "{category}-{specifics}" e.g., "os-packages-dpkg", "lang-deps-npm".
    /// </remarks>
    string FacetId { get; }

    /// <summary>
    /// Gets the human-readable name.
    /// </summary>
    string Name { get; }

    /// <summary>
    /// Gets the facet category for grouping.
    /// </summary>
    FacetCategory Category { get; }

    /// <summary>
    /// Gets the glob patterns or path selectors for files in this facet.
    /// </summary>
    /// <remarks>
    /// <para>Selectors support:</para>
    /// <list type="bullet">
    /// <item><description>Glob patterns: "**/*.json", "/usr/bin/*"</description></item>
    /// <item><description>Exact paths: "/var/lib/dpkg/status"</description></item>
    /// <item><description>Directory patterns: "/etc/**"</description></item>
    /// </list>
    /// Selectors are matched against rooted, forward-slash paths (see GlobMatcher).
    /// </remarks>
    IReadOnlyList<string> Selectors { get; }

    /// <summary>
    /// Gets the priority for conflict resolution when files match multiple facets.
    /// </summary>
    /// <remarks>
    /// Lower values = higher priority. A file matching multiple facets
    /// will be assigned to the facet with the lowest priority value.
    /// Behavior on equal priorities is not defined here — confirm with the classifier.
    /// </remarks>
    int Priority { get; }
}

View File

@@ -0,0 +1,35 @@
// <copyright file="IFacetDriftDetector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Detects drift between a baseline seal and current state.
/// </summary>
public interface IFacetDriftDetector
{
    /// <summary>
    /// Compare a current extraction result against a baseline seal.
    /// </summary>
    /// <param name="baseline">The baseline facet seal.</param>
    /// <param name="current">The current extraction result.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Drift report with per-facet analysis.</returns>
    Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetExtractionResult current,
        CancellationToken ct = default);

    /// <summary>
    /// Compare two seals (e.g., two snapshots of the same image over time).
    /// </summary>
    /// <param name="baseline">The baseline seal.</param>
    /// <param name="current">The current seal.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Drift report with per-facet analysis.</returns>
    Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetSeal current,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,47 @@
// <copyright file="IFacetExtractor.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Extracts facet information from container images.
/// </summary>
public interface IFacetExtractor
{
    /// <summary>
    /// Extract facets from a local directory (unpacked image).
    /// </summary>
    /// <param name="rootPath">Path to the unpacked image root.</param>
    /// <param name="options">Extraction options; null uses defaults.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromDirectoryAsync(
        string rootPath,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);

    /// <summary>
    /// Extract facets from a tar archive.
    /// </summary>
    /// <param name="tarStream">Stream containing the tar archive.</param>
    /// <param name="options">Extraction options; null uses defaults.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromTarAsync(
        Stream tarStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);

    /// <summary>
    /// Extract facets from an OCI image layer.
    /// </summary>
    /// <param name="layerStream">Stream containing the layer (tar.gz).</param>
    /// <param name="options">Extraction options; null uses defaults.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromOciLayerAsync(
        Stream layerStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);
}

View File

@@ -0,0 +1,52 @@
// <copyright file="QuotaExceededAction.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.Facet;
/// <summary>
/// Action to take when a facet quota is exceeded.
/// </summary>
public enum QuotaExceededAction
{
    /// <summary>
    /// Emit a warning but allow the operation to continue.
    /// </summary>
    Warn,

    /// <summary>
    /// Block the operation (fail deployment/admission).
    /// </summary>
    Block,

    /// <summary>
    /// Require a VEX statement to authorize the drift.
    /// </summary>
    RequireVex
}
/// <summary>
/// Result of evaluating a facet's drift against its quota.
/// Roughly mirrors <see cref="QuotaExceededAction"/>: Warning/Blocked/RequiresVex
/// correspond to the Warn/Block/RequireVex actions.
/// </summary>
public enum QuotaVerdict
{
    /// <summary>
    /// Drift is within acceptable limits.
    /// </summary>
    Ok,

    /// <summary>
    /// Drift exceeds threshold but action is Warn.
    /// </summary>
    Warning,

    /// <summary>
    /// Drift exceeds threshold and action is Block.
    /// </summary>
    Blocked,

    /// <summary>
    /// Drift requires VEX authorization.
    /// </summary>
    RequiresVex
}

View File

@@ -0,0 +1,143 @@
// <copyright file="FacetSealJsonConverter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Facet.Serialization;
/// <summary>
/// Shared <see cref="JsonSerializerOptions"/> presets for facet seal serialization.
/// All presets use camelCase names, omit nulls, and install the facet converters.
/// </summary>
public static class FacetJsonOptions
{
    /// <summary>
    /// Gets the default options (compact output).
    /// </summary>
    public static JsonSerializerOptions Default { get; } = Build(indent: false);

    /// <summary>
    /// Gets options for compact serialization (no indentation).
    /// </summary>
    public static JsonSerializerOptions Compact { get; } = Build(indent: false);

    /// <summary>
    /// Gets options for pretty-printed serialization.
    /// </summary>
    public static JsonSerializerOptions Pretty { get; } = Build(indent: true);

    // Single builder so every preset stays configured identically apart from indentation.
    private static JsonSerializerOptions Build(bool indent) => new()
    {
        WriteIndented = indent,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNameCaseInsensitive = true,
        Converters =
        {
            new JsonStringEnumConverter(JsonNamingPolicy.CamelCase),
            new ImmutableArrayConverterFactory(),
            new ImmutableDictionaryConverterFactory(),
        },
    };
}
/// <summary>
/// Creates closed <see cref="ImmutableArrayConverter{T}"/> instances for any
/// <see cref="ImmutableArray{T}"/> element type encountered during serialization.
/// </summary>
internal sealed class ImmutableArrayConverterFactory : JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert) =>
        typeToConvert.IsGenericType
        && typeToConvert.GetGenericTypeDefinition() == typeof(ImmutableArray<>);

    public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        var closed = typeof(ImmutableArrayConverter<>)
            .MakeGenericType(typeToConvert.GetGenericArguments()[0]);
        return (JsonConverter)Activator.CreateInstance(closed)!;
    }
}
/// <summary>
/// Converts <see cref="ImmutableArray{T}"/> to and from a JSON array.
/// </summary>
/// <remarks>
/// A JSON null deserializes to an empty array. A default (uninitialized) array
/// serializes as an empty JSON array instead of throwing.
/// </remarks>
internal sealed class ImmutableArrayConverter<T> : JsonConverter<ImmutableArray<T>>
{
    /// <summary>Read a JSON array (or null) into an <see cref="ImmutableArray{T}"/>.</summary>
    public override ImmutableArray<T> Read(
        ref Utf8JsonReader reader,
        Type typeToConvert,
        JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return [];
        }

        var list = JsonSerializer.Deserialize<List<T>>(ref reader, options);
        return list is null ? [] : [.. list];
    }

    /// <summary>Write the array as a JSON array.</summary>
    public override void Write(
        Utf8JsonWriter writer,
        ImmutableArray<T> value,
        JsonSerializerOptions options)
    {
        // BUGFIX: value.AsEnumerable() throws for default(ImmutableArray<T>)
        // (the struct's default instance wraps a null backing array). Emit an
        // empty JSON array instead of failing the whole serialization.
        if (value.IsDefault)
        {
            writer.WriteStartArray();
            writer.WriteEndArray();
            return;
        }

        JsonSerializer.Serialize(writer, value.AsEnumerable(), options);
    }
}
/// <summary>
/// Creates closed <see cref="ImmutableDictionaryConverter{TKey, TValue}"/> instances
/// for any <see cref="ImmutableDictionary{TKey, TValue}"/> type encountered during serialization.
/// </summary>
internal sealed class ImmutableDictionaryConverterFactory : JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert) =>
        typeToConvert.IsGenericType
        && typeToConvert.GetGenericTypeDefinition() == typeof(ImmutableDictionary<,>);

    public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        var args = typeToConvert.GetGenericArguments();
        var closed = typeof(ImmutableDictionaryConverter<,>).MakeGenericType(args[0], args[1]);
        return (JsonConverter)Activator.CreateInstance(closed)!;
    }
}
/// <summary>
/// Converts <see cref="ImmutableDictionary{TKey, TValue}"/> to and from a JSON object.
/// A JSON null deserializes to a null dictionary.
/// </summary>
internal sealed class ImmutableDictionaryConverter<TKey, TValue> : JsonConverter<ImmutableDictionary<TKey, TValue>>
    where TKey : notnull
{
    /// <summary>Read a JSON object (or null) into an immutable dictionary.</summary>
    public override ImmutableDictionary<TKey, TValue>? Read(
        ref Utf8JsonReader reader,
        Type typeToConvert,
        JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return null;
        }

        var mutable = JsonSerializer.Deserialize<Dictionary<TKey, TValue>>(ref reader, options);
        return mutable?.ToImmutableDictionary();
    }

    /// <summary>Write the dictionary as a JSON object.</summary>
    public override void Write(
        Utf8JsonWriter writer,
        ImmutableDictionary<TKey, TValue> value,
        JsonSerializerOptions options)
    {
        // Copy into a plain Dictionary so the serializer does not re-enter
        // this converter for the immutable type.
        var snapshot = new Dictionary<TKey, TValue>(value);
        JsonSerializer.Serialize(writer, snapshot, options);
    }
}

View File

@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">

  <!-- StellaOps.Facet: facet abstraction library. Nullable + warnings-as-errors are on.
       PackageReference items carry no Version, so versions are presumably pinned by
       central package management — confirm against Directory.Packages.props. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <Description>Facet abstraction layer for per-facet sealing and drift tracking in container images.</Description>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="DotNet.Glob" />
  </ItemGroup>

</Project>

View File

@@ -0,0 +1,89 @@
// <copyright file="ConcurrentHlcBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using Microsoft.Extensions.Time.Testing;
namespace StellaOps.HybridLogicalClock.Benchmarks;
/// <summary>
/// Benchmarks for concurrent HLC operations.
/// Measures thread contention and scalability under parallel access.
/// </summary>
/// <remarks>
/// Uses a <see cref="FakeTimeProvider"/> whose wall clock never advances, so every
/// tick lands on the same physical millisecond — presumably exercising the logical
/// counter path under contention; confirm against the HLC implementation.
/// </remarks>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Monitoring, iterationCount: 5)]
public class ConcurrentHlcBenchmarks
{
    private HybridLogicalClock _clock = null!;
    private InMemoryHlcStateStore _stateStore = null!;
    private FakeTimeProvider _timeProvider = null!;

    /// <summary>Degree of parallelism exercised by each benchmark.</summary>
    [Params(1, 2, 4, 8)]
    public int ThreadCount { get; set; }

    /// <summary>One-time setup: build the clock and prime it with an initial tick.</summary>
    [GlobalSetup]
    public void Setup()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _stateStore = new InMemoryHlcStateStore();
        _clock = new HybridLogicalClock(
            _timeProvider,
            "concurrent-benchmark-node",
            _stateStore);

        // Initialize the clock
        _ = _clock.Tick();
    }

    /// <summary>
    /// Benchmark concurrent tick operations.
    /// Each thread generates 1000 ticks; measures total throughput and contention.
    /// </summary>
    [Benchmark]
    public void ConcurrentTicks_1000PerThread()
    {
        const int ticksPerThread = 1000;

        // Parallel.For bounds the partition count to ThreadCount; the scheduler
        // may still run the partitions on fewer OS threads.
        Parallel.For(0, ThreadCount, threadIndex =>
        {
            for (int i = 0; i < ticksPerThread; i++)
            {
                _clock.Tick();
            }
        });
    }

    /// <summary>
    /// Benchmark mixed concurrent operations (ticks and receives).
    /// Simulates real-world distributed scenario.
    /// </summary>
    [Benchmark]
    public void ConcurrentMixed_TicksAndReceives()
    {
        const int operationsPerThread = 500;
        Parallel.For(0, ThreadCount, threadId =>
        {
            for (int i = 0; i < operationsPerThread; i++)
            {
                if (i % 3 == 0)
                {
                    // Every third operation is a receive
                    var remote = new HlcTimestamp
                    {
                        PhysicalTime = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds(),
                        NodeId = $"remote-node-{threadId}",
                        LogicalCounter = i
                    };
                    _clock.Receive(remote);
                }
                else
                {
                    _clock.Tick();
                }
            }
        });
    }
}

View File

@@ -0,0 +1,104 @@
// <copyright file="HlcBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using Microsoft.Extensions.Time.Testing;
namespace StellaOps.HybridLogicalClock.Benchmarks;
/// <summary>
/// Benchmarks for Hybrid Logical Clock operations.
/// HLC-010: Measures tick throughput and memory allocation.
///
/// To run: dotnet run -c Release
/// </summary>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Throughput, iterationCount: 10)]
public class HlcBenchmarks
{
    private HybridLogicalClock _clock = null!;
    private InMemoryHlcStateStore _stateStore = null!;
    private FakeTimeProvider _timeProvider = null!;

    // Remote timestamp merged in the Receive benchmark; built once in Setup.
    private HlcTimestamp _remoteTimestamp;

    /// <summary>One-time setup: construct the clock under test and prime it.</summary>
    [GlobalSetup]
    public void Setup()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _stateStore = new InMemoryHlcStateStore();
        _clock = new HybridLogicalClock(
            _timeProvider,
            "benchmark-node-1",
            _stateStore);

        // Pre-initialize the clock
        _ = _clock.Tick();

        // Create a remote timestamp for Receive benchmarks
        _remoteTimestamp = new HlcTimestamp
        {
            PhysicalTime = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds(),
            NodeId = "remote-node-1",
            LogicalCounter = 5
        };
    }

    /// <summary>
    /// Benchmark single Tick operation throughput.
    /// Measures the raw performance of generating a new HLC timestamp.
    /// </summary>
    [Benchmark(Baseline = true)]
    public HlcTimestamp Tick()
    {
        return _clock.Tick();
    }

    /// <summary>
    /// Benchmark Tick with time advancement.
    /// Simulates real-world usage where physical time advances between ticks.
    /// NOTE(review): the 1 ms Advance call is inside the measured body, so this
    /// figure includes FakeTimeProvider overhead, not just Tick.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Tick_WithTimeAdvance()
    {
        _timeProvider.Advance(TimeSpan.FromMilliseconds(1));
        return _clock.Tick();
    }

    /// <summary>
    /// Benchmark Receive operation.
    /// Measures performance of merging a remote timestamp.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Receive()
    {
        return _clock.Receive(_remoteTimestamp);
    }

    /// <summary>
    /// Benchmark batch of 100 ticks.
    /// Simulates high-throughput job scheduling scenarios.
    /// </summary>
    [Benchmark(OperationsPerInvoke = 100)]
    public void Tick_Batch100()
    {
        for (int i = 0; i < 100; i++)
        {
            _ = _clock.Tick();
        }
    }

    /// <summary>
    /// Benchmark batch of 1000 ticks.
    /// Stress test for very high throughput scenarios.
    /// </summary>
    [Benchmark(OperationsPerInvoke = 1000)]
    public void Tick_Batch1000()
    {
        for (int i = 0; i < 1000; i++)
        {
            _ = _clock.Tick();
        }
    }
}

View File

@@ -0,0 +1,131 @@
// <copyright file="HlcTimestampBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
namespace StellaOps.HybridLogicalClock.Benchmarks;
/// <summary>
/// Benchmarks for HlcTimestamp operations.
/// Measures parsing, serialization, and comparison performance.
/// </summary>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Throughput, iterationCount: 10)]
public class HlcTimestampBenchmarks
{
    private HlcTimestamp _timestamp;
    private string _sortableString = null!;
    private string _jsonString = null!;
    private HlcTimestamp[] _timestamps = null!;

    // Cached so serializer metadata is built once, outside the measured bodies.
    private static readonly JsonSerializerOptions JsonOptions = new();

    /// <summary>One-time setup: precompute fixture strings and a shuffled timestamp array.</summary>
    [GlobalSetup]
    public void Setup()
    {
        _timestamp = new HlcTimestamp
        {
            PhysicalTime = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
            NodeId = "scheduler-east-1",
            LogicalCounter = 42
        };
        _sortableString = _timestamp.ToSortableString();
        _jsonString = JsonSerializer.Serialize(_timestamp, JsonOptions);

        // Generate array of timestamps for sorting benchmark.
        // Fixed seed keeps the workload identical across runs.
        _timestamps = new HlcTimestamp[1000];
        var random = new Random(42);
        for (int i = 0; i < _timestamps.Length; i++)
        {
            _timestamps[i] = new HlcTimestamp
            {
                PhysicalTime = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() + random.Next(-1000, 1000),
                NodeId = $"node-{random.Next(1, 10)}",
                LogicalCounter = random.Next(0, 1000)
            };
        }
    }

    /// <summary>
    /// Benchmark ToSortableString serialization.
    /// </summary>
    [Benchmark]
    public string ToSortableString()
    {
        return _timestamp.ToSortableString();
    }

    /// <summary>
    /// Benchmark Parse from sortable string.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Parse()
    {
        return HlcTimestamp.Parse(_sortableString);
    }

    /// <summary>
    /// Benchmark TryParse from sortable string.
    /// </summary>
    [Benchmark]
    public bool TryParse()
    {
        return HlcTimestamp.TryParse(_sortableString, out _);
    }

    /// <summary>
    /// Benchmark full round-trip: serialize then parse.
    /// </summary>
    [Benchmark]
    public HlcTimestamp RoundTrip()
    {
        var str = _timestamp.ToSortableString();
        return HlcTimestamp.Parse(str);
    }

    /// <summary>
    /// Benchmark JSON serialization.
    /// </summary>
    [Benchmark]
    public string JsonSerialize()
    {
        return JsonSerializer.Serialize(_timestamp, JsonOptions);
    }

    /// <summary>
    /// Benchmark JSON deserialization.
    /// </summary>
    [Benchmark]
    public HlcTimestamp JsonDeserialize()
    {
        return JsonSerializer.Deserialize<HlcTimestamp>(_jsonString, JsonOptions);
    }

    /// <summary>
    /// Benchmark CompareTo operation.
    /// </summary>
    [Benchmark]
    public int CompareTo()
    {
        var other = new HlcTimestamp
        {
            PhysicalTime = _timestamp.PhysicalTime + 1,
            NodeId = _timestamp.NodeId,
            LogicalCounter = 0
        };
        return _timestamp.CompareTo(other);
    }

    /// <summary>
    /// Benchmark sorting 1000 timestamps.
    /// Clones first so each invocation sorts the same unsorted input.
    /// </summary>
    [Benchmark]
    public void Sort1000Timestamps()
    {
        var copy = (HlcTimestamp[])_timestamps.Clone();
        Array.Sort(copy);
    }
}

View File

@@ -0,0 +1,31 @@
// <copyright file="Program.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Running;
namespace StellaOps.HybridLogicalClock.Benchmarks;
/// <summary>
/// Console entry point for the HLC benchmark suite.
/// </summary>
public static class Program
{
    /// <summary>
    /// Dispatch command-line arguments to BenchmarkDotNet.
    /// Usage:
    ///   dotnet run -c Release                     # Run all benchmarks
    ///   dotnet run -c Release --filter "Tick"     # Run only Tick benchmarks
    ///   dotnet run -c Release --list flat         # List available benchmarks
    /// </summary>
    public static void Main(string[] args)
    {
        // Optimizations validator is disabled so the suite can run against Debug
        // builds of referenced projects without aborting.
        var configuration = DefaultConfig.Instance
            .WithOptions(ConfigOptions.DisableOptimizationsValidator);

        var switcher = BenchmarkSwitcher.FromAssembly(typeof(Program).Assembly);
        switcher.Run(args, configuration);
    }
}

View File

@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">

  <!-- BenchmarkDotNet harness for the HLC library; not packable.
       Package versions are presumably pinned by central package management —
       confirm against Directory.Packages.props. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="BenchmarkDotNet" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
  </ItemGroup>

</Project>

View File

@@ -0,0 +1,142 @@
// <copyright file="HlcTimestampJsonConverterTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using FluentAssertions;
namespace StellaOps.HybridLogicalClock.Tests;
/// <summary>
/// Unit tests for <see cref="HlcTimestampJsonConverter"/>.
/// </summary>
/// <remarks>
/// The converter represents a timestamp as its sortable string form
/// ("{physicalMs}-{nodeId}-{counter padded to 6 digits}") rather than as a JSON object.
/// </remarks>
[Trait("Category", "Unit")]
public sealed class HlcTimestampJsonConverterTests
{
    private readonly JsonSerializerOptions _options = new()
    {
        Converters = { new HlcTimestampJsonConverter() }
    };

    /// <summary>Serialization emits a single JSON string in sortable form.</summary>
    [Fact]
    public void Serialize_ProducesSortableString()
    {
        // Arrange
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "node1",
            LogicalCounter = 42
        };

        // Act
        var json = JsonSerializer.Serialize(timestamp, _options);

        // Assert
        json.Should().Be("\"1704067200000-node1-000042\"");
    }

    /// <summary>Deserialization decomposes the sortable string into its three parts.</summary>
    [Fact]
    public void Deserialize_ParsesSortableString()
    {
        // Arrange
        var json = "\"1704067200000-node1-000042\"";

        // Act
        var result = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        result.PhysicalTime.Should().Be(1704067200000);
        result.NodeId.Should().Be("node1");
        result.LogicalCounter.Should().Be(42);
    }

    /// <summary>Serialize then deserialize yields an equal value.</summary>
    [Fact]
    public void RoundTrip_PreservesValues()
    {
        // Arrange
        var original = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "scheduler-east-1",
            LogicalCounter = 999
        };

        // Act
        var json = JsonSerializer.Serialize(original, _options);
        var deserialized = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        deserialized.Should().Be(original);
    }

    /// <summary>A JSON null maps to the Zero sentinel, not an error.</summary>
    [Fact]
    public void Deserialize_Null_ReturnsZero()
    {
        // Arrange
        var json = "null";

        // Act
        var result = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        result.Should().Be(HlcTimestamp.Zero);
    }

    /// <summary>A string that is not in sortable form is rejected.</summary>
    [Fact]
    public void Deserialize_InvalidFormat_ThrowsJsonException()
    {
        // Arrange
        var json = "\"invalid\"";

        // Act
        var act = () => JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        act.Should().Throw<JsonException>();
    }

    /// <summary>Only JSON strings (or null) are accepted as input tokens.</summary>
    [Fact]
    public void Deserialize_WrongTokenType_ThrowsJsonException()
    {
        // Arrange
        var json = "12345"; // number, not string

        // Act
        var act = () => JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        act.Should().Throw<JsonException>();
    }

    /// <summary>The converter also works for a timestamp nested inside another object.</summary>
    [Fact]
    public void SerializeInObject_WorksCorrectly()
    {
        // Arrange
        var obj = new TestWrapper
        {
            Timestamp = new HlcTimestamp
            {
                PhysicalTime = 1704067200000,
                NodeId = "node1",
                LogicalCounter = 1
            },
            Name = "Test"
        };

        // Act
        var json = JsonSerializer.Serialize(obj, _options);
        var deserialized = JsonSerializer.Deserialize<TestWrapper>(json, _options);

        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Timestamp.Should().Be(obj.Timestamp);
        deserialized.Name.Should().Be(obj.Name);
    }

    // Minimal host object for the nested-property test.
    private sealed class TestWrapper
    {
        public HlcTimestamp Timestamp { get; set; }

        public string? Name { get; set; }
    }
}

View File

@@ -0,0 +1,366 @@
// <copyright file="HlcTimestampTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
namespace StellaOps.HybridLogicalClock.Tests;
/// <summary>
/// Unit tests for <see cref="HlcTimestamp"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class HlcTimestampTests
{
[Fact]
public void ToSortableString_FormatsCorrectly()
{
// Arrange
var timestamp = new HlcTimestamp
{
PhysicalTime = 1704067200000, // 2024-01-01 00:00:00 UTC
NodeId = "scheduler-east-1",
LogicalCounter = 42
};
// Act
var result = timestamp.ToSortableString();
// Assert
result.Should().Be("1704067200000-scheduler-east-1-000042");
}
[Fact]
public void Parse_RoundTrip_PreservesValues()
{
// Arrange
var original = new HlcTimestamp
{
PhysicalTime = 1704067200000,
NodeId = "scheduler-east-1",
LogicalCounter = 42
};
// Act
var serialized = original.ToSortableString();
var parsed = HlcTimestamp.Parse(serialized);
// Assert
parsed.Should().Be(original);
parsed.PhysicalTime.Should().Be(original.PhysicalTime);
parsed.NodeId.Should().Be(original.NodeId);
parsed.LogicalCounter.Should().Be(original.LogicalCounter);
}
[Fact]
public void Parse_WithHyphensInNodeId_ParsesCorrectly()
{
// Arrange - NodeId contains multiple hyphens
var original = new HlcTimestamp
{
PhysicalTime = 1704067200000,
NodeId = "scheduler-east-1-prod",
LogicalCounter = 123
};
// Act
var serialized = original.ToSortableString();
var parsed = HlcTimestamp.Parse(serialized);
// Assert
parsed.NodeId.Should().Be("scheduler-east-1-prod");
}
[Fact]
public void TryParse_ValidString_ReturnsTrue()
{
// Act
var result = HlcTimestamp.TryParse("1704067200000-node1-000001", out var timestamp);
// Assert
result.Should().BeTrue();
timestamp.PhysicalTime.Should().Be(1704067200000);
timestamp.NodeId.Should().Be("node1");
timestamp.LogicalCounter.Should().Be(1);
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData("invalid")]
[InlineData("abc-node-001")]
[InlineData("1234567890123--000001")]
[InlineData("1234567890123-node-abc")]
public void TryParse_InvalidString_ReturnsFalse(string? input)
{
// Act
var result = HlcTimestamp.TryParse(input, out _);
// Assert
result.Should().BeFalse();
}
[Fact]
public void Parse_InvalidString_ThrowsFormatException()
{
// Act
var act = () => HlcTimestamp.Parse("invalid");
// Assert
act.Should().Throw<FormatException>();
}
[Fact]
public void Parse_Null_ThrowsArgumentNullException()
{
// Act
var act = () => HlcTimestamp.Parse(null!);
// Assert
act.Should().Throw<ArgumentNullException>();
}
[Fact]
public void CompareTo_SamePhysicalTime_HigherCounterIsGreater()
{
// Arrange
var earlier = new HlcTimestamp
{
PhysicalTime = 1000,
NodeId = "node1",
LogicalCounter = 1
};
var later = new HlcTimestamp
{
PhysicalTime = 1000,
NodeId = "node1",
LogicalCounter = 2
};
// Act & Assert
earlier.CompareTo(later).Should().BeLessThan(0);
later.CompareTo(earlier).Should().BeGreaterThan(0);
(earlier < later).Should().BeTrue();
(later > earlier).Should().BeTrue();
}
[Fact]
public void CompareTo_DifferentPhysicalTime_HigherTimeIsGreater()
{
    // Arrange - physical time dominates the ordering even when the earlier
    // timestamp carries a much larger logical counter.
    static HlcTimestamp Make(long physical, int counter) => new()
    {
        PhysicalTime = physical,
        NodeId = "node1",
        LogicalCounter = counter
    };

    var older = Make(1000, 999);
    var newer = Make(1001, 0);

    // Act & Assert
    older.CompareTo(newer).Should().BeLessThan(0);
    newer.CompareTo(older).Should().BeGreaterThan(0);
}
[Fact]
public void CompareTo_SameTimeAndCounter_NodeIdBreaksTie()
{
    // Arrange - time and counter are equal; ordinal node-id order decides.
    static HlcTimestamp ForNode(string node) => new()
    {
        PhysicalTime = 1000,
        NodeId = node,
        LogicalCounter = 1
    };

    var alpha = ForNode("aaa");
    var beta = ForNode("bbb");

    // Act & Assert
    alpha.CompareTo(beta).Should().BeLessThan(0);
    beta.CompareTo(alpha).Should().BeGreaterThan(0);
}
[Fact]
public void CompareTo_Equal_ReturnsZero()
{
    // Arrange - a value and its structural copy.
    var left = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };
    var right = left with { };

    // Act & Assert - comparison is zero and both inclusive operators hold.
    left.CompareTo(right).Should().Be(0);
    (left <= right).Should().BeTrue();
    (left >= right).Should().BeTrue();
}
[Fact]
public void Zero_HasExpectedValues()
{
    // The Zero sentinel is all-default components: epoch time, empty node,
    // counter zero.
    HlcTimestamp.Zero.PhysicalTime.Should().Be(0);
    HlcTimestamp.Zero.NodeId.Should().BeEmpty();
    HlcTimestamp.Zero.LogicalCounter.Should().Be(0);
}
[Fact]
public void PhysicalDateTime_ConvertsCorrectly()
{
    // Arrange - 2024-01-01T00:00:00Z expressed as Unix milliseconds.
    var timestamp = new HlcTimestamp
    {
        PhysicalTime = 1704067200000,
        NodeId = "node1",
        LogicalCounter = 0
    };

    // Act & Assert - the convenience property mirrors DateTimeOffset.FromUnixTimeMilliseconds.
    timestamp.PhysicalDateTime
        .Should().Be(new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero));
}
[Fact]
public void Equality_SameValues_AreEqual()
{
    // Arrange - two independently constructed but identical values.
    var left = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };
    var right = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };

    // Assert - value equality, operator equality, and hash agreement.
    left.Should().Be(right);
    (left == right).Should().BeTrue();
    left.GetHashCode().Should().Be(right.GetHashCode());
}
[Fact]
public void Equality_DifferentValues_AreNotEqual()
{
    // Arrange - derive a copy differing only in the logical counter.
    var original = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };
    var mutated = original with { LogicalCounter = 2 };

    // Assert
    original.Should().NotBe(mutated);
    (original != mutated).Should().BeTrue();
}
[Fact]
public void ToString_ReturnsSortableString()
{
    // Arrange
    var timestamp = new HlcTimestamp
    {
        PhysicalTime = 1704067200000,
        NodeId = "node1",
        LogicalCounter = 42
    };

    // Act & Assert - ToString is defined as an alias of ToSortableString.
    timestamp.ToString().Should().Be(timestamp.ToSortableString());
}
[Fact]
public void CompareTo_ObjectOverload_WorksCorrectly()
{
    // Arrange - box the right-hand side to exercise IComparable.CompareTo(object).
    var smaller = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };
    object boxedLarger = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 2 };

    // Act & Assert
    smaller.CompareTo(boxedLarger).Should().BeLessThan(0);
}
[Fact]
public void CompareTo_Null_ReturnsPositive()
{
    // Arrange - per IComparable convention every value sorts after null.
    var timestamp = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };

    // Act & Assert
    timestamp.CompareTo(null).Should().BeGreaterThan(0);
}
[Fact]
public void CompareTo_WrongType_ThrowsArgumentException()
{
    // Arrange
    var timestamp = new HlcTimestamp { PhysicalTime = 1000, NodeId = "node1", LogicalCounter = 1 };

    // Act - comparing against a foreign type must be rejected.
    Func<int> compare = () => timestamp.CompareTo("not a timestamp");

    // Assert
    compare.Should().Throw<ArgumentException>();
}
}

View File

@@ -0,0 +1,376 @@
// <copyright file="HybridLogicalClockTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
namespace StellaOps.HybridLogicalClock.Tests;
/// <summary>
/// Unit tests for <see cref="HybridLogicalClock"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class HybridLogicalClockTests
{
    private const string TestNodeId = "test-node-1";

    /// <summary>Successive local ticks are strictly increasing.</summary>
    [Fact]
    public void Tick_Monotonic_SuccessiveTicksAlwaysIncrease()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        var timestamps = Enumerable.Range(0, 100)
            .Select(_ => clock.Tick())
            .ToList();

        // Assert
        for (var i = 1; i < timestamps.Count; i++)
        {
            timestamps[i].Should().BeGreaterThan(timestamps[i - 1],
                $"Timestamp {i} should be greater than timestamp {i - 1}");
        }
    }

    /// <summary>While the wall clock is frozen, the logical counter increments.</summary>
    [Fact]
    public void Tick_SamePhysicalTime_IncrementsCounter()
    {
        // Arrange
        var fixedTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(fixedTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        var first = clock.Tick();
        var second = clock.Tick();
        var third = clock.Tick();

        // Assert
        first.LogicalCounter.Should().Be(0);
        second.LogicalCounter.Should().Be(1);
        third.LogicalCounter.Should().Be(2);
        // All should have same physical time
        first.PhysicalTime.Should().Be(second.PhysicalTime);
        second.PhysicalTime.Should().Be(third.PhysicalTime);
    }

    /// <summary>Advancing the wall clock resets the logical counter to zero.</summary>
    [Fact]
    public void Tick_NewPhysicalTime_ResetsCounter()
    {
        // Arrange
        var startTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(startTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act - generate some ticks
        clock.Tick();
        clock.Tick();
        var beforeAdvance = clock.Tick();

        // Advance time
        timeProvider.Advance(TimeSpan.FromMilliseconds(1));
        var afterAdvance = clock.Tick();

        // Assert
        beforeAdvance.LogicalCounter.Should().Be(2);
        afterAdvance.LogicalCounter.Should().Be(0);
        afterAdvance.PhysicalTime.Should().BeGreaterThan(beforeAdvance.PhysicalTime);
    }

    /// <summary>The configured node id is stamped on every tick and exposed on the clock.</summary>
    [Fact]
    public void Tick_NodeId_IsCorrectlySet()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, "my-custom-node", stateStore);

        // Act
        var timestamp = clock.Tick();

        // Assert
        timestamp.NodeId.Should().Be("my-custom-node");
        clock.NodeId.Should().Be("my-custom-node");
    }

    /// <summary>A remote timestamp ahead of local time pulls the clock forward.</summary>
    [Fact]
    public void Receive_RemoteTimestampAhead_MergesCorrectly()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Local tick first
        var localTick = clock.Tick();

        // Remote timestamp is 100ms ahead
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMilliseconds(100).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 5
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(remote.PhysicalTime);
        result.LogicalCounter.Should().Be(6); // remote counter + 1
        result.NodeId.Should().Be(TestNodeId);
    }

    /// <summary>A remote timestamp behind local time leaves local time authoritative.</summary>
    [Fact]
    public void Receive_LocalTimestampAhead_MergesCorrectly()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Generate several local ticks to advance counter
        clock.Tick();
        clock.Tick();
        var localState = clock.Tick();

        // Remote timestamp is behind
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMilliseconds(-100).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 0
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(localState.PhysicalTime);
        result.LogicalCounter.Should().Be(localState.LogicalCounter + 1);
    }

    /// <summary>At equal physical time the merged counter is max(local, remote) + 1.</summary>
    [Fact]
    public void Receive_SamePhysicalTime_MergesCounters()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Local tick
        clock.Tick();
        clock.Tick();
        var localState = clock.Current; // counter = 1

        // Remote timestamp with same physical time but higher counter
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 10
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(localTime.ToUnixTimeMilliseconds());
        result.LogicalCounter.Should().Be(11); // max(local, remote) + 1
    }

    /// <summary>Remote timestamps beyond the skew tolerance are rejected.</summary>
    [Fact]
    public void Receive_ClockSkewExceeded_ThrowsException()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var maxSkew = TimeSpan.FromMinutes(1);
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, maxSkew);

        // Remote timestamp is 2 minutes ahead (exceeds 1 minute tolerance)
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMinutes(2).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 0
        };

        // Act
        var act = () => clock.Receive(remote);

        // Assert
        act.Should().Throw<HlcClockSkewException>()
            .Where(e => e.MaxAllowedSkew == maxSkew)
            .Where(e => e.ObservedSkew > maxSkew);
    }

    /// <summary>Current always reflects the most recent tick.</summary>
    [Fact]
    public void Current_ReturnsLatestState()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        var tick1 = clock.Tick();
        var current1 = clock.Current;
        var tick2 = clock.Tick();
        var current2 = clock.Current;

        // Assert
        current1.Should().Be(tick1);
        current2.Should().Be(tick2);
    }

    /// <summary>Without persisted state, initialization starts from the current wall clock.</summary>
    [Fact]
    public async Task InitializeAsync_NoPersistedState_StartsFromCurrentTime()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var startTime = new DateTimeOffset(2024, 1, 1, 12, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(startTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        var recovered = await clock.InitializeAsync(ct);

        // Assert
        recovered.Should().BeFalse();
        clock.Current.PhysicalTime.Should().Be(startTime.ToUnixTimeMilliseconds());
        clock.Current.LogicalCounter.Should().Be(0);
    }

    /// <summary>Persisted state at the same physical time resumes the counter.</summary>
    [Fact]
    public async Task InitializeAsync_WithPersistedState_ResumesFromPersisted()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var startTime = new DateTimeOffset(2024, 1, 1, 12, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(startTime);
        var stateStore = new InMemoryHlcStateStore();

        // Pre-persist state
        var persistedState = new HlcTimestamp
        {
            PhysicalTime = startTime.ToUnixTimeMilliseconds(),
            NodeId = TestNodeId,
            LogicalCounter = 50
        };
        await stateStore.SaveAsync(persistedState, ct);
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        var recovered = await clock.InitializeAsync(ct);
        var firstTick = clock.Tick();

        // Assert
        recovered.Should().BeTrue();
        firstTick.LogicalCounter.Should().BeGreaterThan(50); // Should continue from persisted + 1
    }

    /// <summary>Stale persisted state is superseded by the current wall clock.</summary>
    [Fact]
    public async Task InitializeAsync_PersistedStateOlderThanCurrent_UsesCurrentTime()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var startTime = new DateTimeOffset(2024, 1, 1, 12, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(startTime);
        var stateStore = new InMemoryHlcStateStore();

        // Pre-persist OLD state
        var persistedState = new HlcTimestamp
        {
            PhysicalTime = startTime.AddHours(-1).ToUnixTimeMilliseconds(),
            NodeId = TestNodeId,
            LogicalCounter = 1000
        };
        await stateStore.SaveAsync(persistedState, ct);
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        await clock.InitializeAsync(ct);
        var firstTick = clock.Tick();

        // Assert
        // Should use current physical time since it's greater
        firstTick.PhysicalTime.Should().Be(startTime.ToUnixTimeMilliseconds());
        firstTick.LogicalCounter.Should().Be(1); // Reset because physical time advanced
    }

    /// <summary>Every tick is eventually persisted to the state store.</summary>
    [Fact]
    public async Task Tick_PersistsState()
    {
        // Arrange
        var ct = TestContext.Current.CancellationToken;
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore);

        // Act
        clock.Tick();

        // Assert - persistence is fire-and-forget, so poll with a generous
        // deadline instead of a single fixed delay (a fixed 50ms sleep is
        // flaky on slow/loaded CI machines).
        var deadline = DateTime.UtcNow.AddSeconds(2);
        while (stateStore.Count == 0 && DateTime.UtcNow < deadline)
        {
            await Task.Delay(10, ct);
        }

        stateStore.Count.Should().Be(1);
    }

    /// <summary>The time provider is mandatory.</summary>
    [Fact]
    public void Constructor_NullTimeProvider_ThrowsArgumentNullException()
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(null!, TestNodeId, new InMemoryHlcStateStore());

        // Assert
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("timeProvider");
    }

    /// <summary>Null, empty, or whitespace node ids are rejected.</summary>
    [Theory]
    [InlineData(null)]
    [InlineData("")]
    [InlineData(" ")]
    public void Constructor_InvalidNodeId_ThrowsArgumentException(string? nodeId)
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(
            new FakeTimeProvider(),
            nodeId!,
            new InMemoryHlcStateStore());

        // Assert
        act.Should().Throw<ArgumentException>();
    }

    /// <summary>The state store is mandatory.</summary>
    [Fact]
    public void Constructor_NullStateStore_ThrowsArgumentNullException()
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(
            new FakeTimeProvider(),
            TestNodeId,
            null!);

        // Assert
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("stateStore");
    }
}

View File

@@ -0,0 +1,168 @@
// <copyright file="InMemoryHlcStateStoreTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using FluentAssertions;
using Xunit;
namespace StellaOps.HybridLogicalClock.Tests;
/// <summary>
/// Unit tests for <see cref="InMemoryHlcStateStore"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class InMemoryHlcStateStoreTests
{
    /// <summary>Loading an unknown node id yields null rather than throwing.</summary>
    [Fact]
    public async Task LoadAsync_NoState_ReturnsNull()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        // Act
        var result = await store.LoadAsync("node1", ct);
        // Assert
        result.Should().BeNull();
    }

    /// <summary>A saved timestamp round-trips through the store.</summary>
    [Fact]
    public async Task SaveAsync_ThenLoadAsync_ReturnsState()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };
        // Act
        await store.SaveAsync(timestamp, ct);
        var result = await store.LoadAsync("node1", ct);
        // Assert
        result.Should().Be(timestamp);
    }

    /// <summary>Saving a later timestamp overwrites the stored one.</summary>
    [Fact]
    public async Task SaveAsync_GreaterTimestamp_Updates()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var first = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };
        var second = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 10
        };
        // Act
        await store.SaveAsync(first, ct);
        await store.SaveAsync(second, ct);
        var result = await store.LoadAsync("node1", ct);
        // Assert
        result.Should().Be(second);
    }

    /// <summary>Saving an earlier timestamp is ignored (store is monotonic).</summary>
    [Fact]
    public async Task SaveAsync_SmallerTimestamp_DoesNotUpdate()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var first = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 10
        };
        var second = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };
        // Act
        await store.SaveAsync(first, ct);
        await store.SaveAsync(second, ct);
        var result = await store.LoadAsync("node1", ct);
        // Assert
        result.Should().Be(first);
    }

    /// <summary>State for different node ids is stored independently.</summary>
    [Fact]
    public async Task SaveAsync_MultipleNodes_Isolated()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var node1State = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var node2State = new HlcTimestamp
        {
            PhysicalTime = 2000,
            NodeId = "node2",
            LogicalCounter = 2
        };
        // Act
        await store.SaveAsync(node1State, ct);
        await store.SaveAsync(node2State, ct);
        // Assert
        var loaded1 = await store.LoadAsync("node1", ct);
        var loaded2 = await store.LoadAsync("node2", ct);
        loaded1.Should().Be(node1State);
        loaded2.Should().Be(node2State);
        store.Count.Should().Be(2);
    }

    /// <summary>Clear wipes all nodes' state.</summary>
    [Fact]
    public async Task Clear_RemovesAllState()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        await store.SaveAsync(new HlcTimestamp { PhysicalTime = 1, NodeId = "n1", LogicalCounter = 0 }, ct);
        await store.SaveAsync(new HlcTimestamp { PhysicalTime = 2, NodeId = "n2", LogicalCounter = 0 }, ct);
        // Act
        store.Clear();
        // Assert
        store.Count.Should().Be(0);
    }

    /// <summary>A null node id is an argument error, not a "not found".</summary>
    [Fact]
    public async Task LoadAsync_NullNodeId_ThrowsArgumentNullException()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        // Act
        var act = () => store.LoadAsync(null!, ct);
        // Assert
        await act.Should().ThrowAsync<ArgumentNullException>();
    }
}

View File

@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- xUnit v3 unit-test project for StellaOps.HybridLogicalClock. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <!-- Test assembly: executed by the test runner, never published as a package. -->
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>
  <ItemGroup>
    <!-- No Version attributes here - presumably versions come from central
         package management (Directory.Packages.props); confirm in repo root. -->
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="Moq" />
    <PackageReference Include="xunit.v3" />
    <PackageReference Include="xunit.runner.visualstudio">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>
  <ItemGroup>
    <!-- System under test. -->
    <ProjectReference Include="..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,71 @@
// <copyright file="HlcClockSkewException.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Exception thrown when clock skew exceeds the configured tolerance.
/// </summary>
/// <remarks>
/// <para>
/// Raised when a remote timestamp differs from the local physical clock by
/// more than the configured maximum skew tolerance. Typical causes:
/// </para>
/// <list type="bullet">
/// <item><description>NTP synchronization failure on one or more nodes</description></item>
/// <item><description>Malicious/corrupted remote timestamp</description></item>
/// <item><description>Overly aggressive skew tolerance configuration</description></item>
/// </list>
/// </remarks>
public sealed class HlcClockSkewException : Exception
{
    /// <summary>
    /// Gets the observed clock skew.
    /// </summary>
    public TimeSpan ObservedSkew { get; }

    /// <summary>
    /// Gets the maximum allowed clock skew.
    /// </summary>
    public TimeSpan MaxAllowedSkew { get; }

    /// <summary>
    /// Initializes a new instance recording both the observed and the allowed skew,
    /// with a message describing the violation in whole milliseconds.
    /// </summary>
    /// <param name="observedSkew">The observed clock skew.</param>
    /// <param name="maxAllowedSkew">The maximum allowed skew.</param>
    public HlcClockSkewException(TimeSpan observedSkew, TimeSpan maxAllowedSkew)
        : base($"Clock skew of {observedSkew.TotalMilliseconds:F0}ms exceeds maximum allowed {maxAllowedSkew.TotalMilliseconds:F0}ms")
    {
        ObservedSkew = observedSkew;
        MaxAllowedSkew = maxAllowedSkew;
    }

    /// <summary>
    /// Initializes a new instance with a custom message; skew properties stay at default.
    /// </summary>
    /// <param name="message">The error message.</param>
    public HlcClockSkewException(string message)
        : base(message)
    {
    }

    /// <summary>
    /// Initializes a new instance with a custom message and an inner exception.
    /// </summary>
    /// <param name="message">The error message.</param>
    /// <param name="innerException">The inner exception.</param>
    public HlcClockSkewException(string message, Exception innerException)
        : base(message, innerException)
    {
    }

    /// <summary>
    /// Initializes a new instance with the framework-default message.
    /// </summary>
    public HlcClockSkewException()
    {
    }
}

View File

@@ -0,0 +1,77 @@
// <copyright file="HlcOptions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.ComponentModel.DataAnnotations;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Configuration options for the Hybrid Logical Clock.
/// </summary>
public sealed class HlcOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "HybridLogicalClock";

    /// <summary>
    /// Gets or sets the unique node identifier.
    /// </summary>
    /// <remarks>
    /// Should be stable across restarts (e.g., "scheduler-east-1").
    /// When unset, <see cref="GetEffectiveNodeId"/> derives one from the
    /// machine name and the current process ID.
    /// </remarks>
    public string? NodeId { get; set; }

    /// <summary>
    /// Gets or sets the maximum allowed clock skew. Default: 1 minute.
    /// </summary>
    /// <remarks>
    /// Remote timestamps differing from the local physical clock by more than
    /// this are rejected with <see cref="HlcClockSkewException"/>.
    /// </remarks>
    [Range(typeof(TimeSpan), "00:00:01", "01:00:00")]
    public TimeSpan MaxClockSkew { get; set; } = TimeSpan.FromMinutes(1);

    /// <summary>
    /// Gets or sets the PostgreSQL connection string for state persistence.
    /// </summary>
    /// <remarks>
    /// If null, the in-memory state store is used (state lost on restart).
    /// </remarks>
    public string? PostgresConnectionString { get; set; }

    /// <summary>
    /// Gets or sets the PostgreSQL schema for HLC tables.
    /// </summary>
    public string PostgresSchema { get; set; } = "scheduler";

    /// <summary>
    /// Gets or sets whether to force the in-memory state store.
    /// </summary>
    /// <remarks>
    /// If true, state is not persisted (useful for testing). If false and
    /// <see cref="PostgresConnectionString"/> is set, PostgreSQL is used.
    /// </remarks>
    public bool UseInMemoryStore { get; set; }

    /// <summary>
    /// Gets the effective node ID, generating "{machine}-{pid}" when none is
    /// configured (note: the process ID changes on every restart).
    /// </summary>
    /// <returns>The node ID to use.</returns>
    public string GetEffectiveNodeId() =>
        string.IsNullOrWhiteSpace(NodeId)
            ? $"{Environment.MachineName.ToLowerInvariant()}-{Environment.ProcessId}"
            : NodeId;
}

View File

@@ -0,0 +1,127 @@
// <copyright file="HlcServiceCollectionExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Extension methods for registering HLC services with dependency injection.
/// </summary>
public static class HlcServiceCollectionExtensions
{
    /// <summary>
    /// Adds Hybrid Logical Clock services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Optional action to configure HLC options.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddHybridLogicalClock(
        this IServiceCollection services,
        Action<HlcOptions>? configureOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Apply caller configuration first, then attach validation to the
        // options pipeline so misconfiguration fails at startup.
        if (configureOptions is not null)
        {
            services.Configure(configureOptions);
        }

        services.AddOptions<HlcOptions>()
            .ValidateDataAnnotations()
            .ValidateOnStart();

        // Make Dapper aware of the HlcTimestamp column mapping.
        HlcTimestampTypeHandler.Register();

        // Keep any TimeProvider the host already registered.
        services.TryAddSingleton(TimeProvider.System);

        // State store selection: explicit in-memory > PostgreSQL > in-memory fallback.
        services.AddSingleton<IHlcStateStore>(provider =>
        {
            var options = provider.GetRequiredService<IOptions<HlcOptions>>().Value;

            if (!options.UseInMemoryStore
                && !string.IsNullOrEmpty(options.PostgresConnectionString))
            {
                return new PostgresHlcStateStore(
                    options.PostgresConnectionString,
                    options.PostgresSchema,
                    provider.GetService<ILogger<PostgresHlcStateStore>>());
            }

            return new InMemoryHlcStateStore();
        });

        // The clock itself is a singleton assembled from the pieces above.
        services.AddSingleton<IHybridLogicalClock>(provider =>
        {
            var options = provider.GetRequiredService<IOptions<HlcOptions>>().Value;

            return new HybridLogicalClock(
                provider.GetRequiredService<TimeProvider>(),
                options.GetEffectiveNodeId(),
                provider.GetRequiredService<IHlcStateStore>(),
                options.MaxClockSkew,
                provider.GetService<ILogger<HybridLogicalClock>>());
        });

        return services;
    }

    /// <summary>
    /// Adds Hybrid Logical Clock services with a specific node ID.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="nodeId">The node identifier.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddHybridLogicalClock(
        this IServiceCollection services,
        string nodeId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);
        return services.AddHybridLogicalClock(options => options.NodeId = nodeId);
    }

    /// <summary>
    /// Initializes the HLC clock from persistent state.
    /// Should be called during application startup.
    /// </summary>
    /// <param name="serviceProvider">The service provider.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A task representing the async operation.</returns>
    public static async Task InitializeHlcAsync(
        this IServiceProvider serviceProvider,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(serviceProvider);

        // Only the concrete clock exposes InitializeAsync; any other
        // IHybridLogicalClock registration makes this a no-op.
        if (serviceProvider.GetRequiredService<IHybridLogicalClock>() is HybridLogicalClock hlc)
        {
            await hlc.InitializeAsync(ct).ConfigureAwait(false);
        }
    }
}

View File

@@ -0,0 +1,222 @@
// <copyright file="HlcTimestamp.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Text.Json.Serialization;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Hybrid Logical Clock timestamp providing monotonic, causally-ordered time
/// across distributed nodes even under clock skew.
/// </summary>
/// <remarks>
/// <para>
/// HLC combines the benefits of physical time (human-readable, bounded drift)
/// with logical clocks (guaranteed causality, no rollback). The timestamp
/// consists of three components:
/// </para>
/// <list type="bullet">
/// <item><description>PhysicalTime: Unix milliseconds UTC, advances with wall clock</description></item>
/// <item><description>NodeId: Unique identifier for the generating node</description></item>
/// <item><description>LogicalCounter: Increments when events occur at same physical time</description></item>
/// </list>
/// <para>
/// Total ordering is defined as: (PhysicalTime, LogicalCounter, NodeId)
/// </para>
/// </remarks>
[JsonConverter(typeof(HlcTimestampJsonConverter))]
public readonly record struct HlcTimestamp : IComparable<HlcTimestamp>, IComparable
{
    /// <summary>
    /// Physical time component (Unix milliseconds UTC).
    /// </summary>
    public required long PhysicalTime { get; init; }

    /// <summary>
    /// Unique node identifier (e.g., "scheduler-east-1").
    /// </summary>
    public required string NodeId { get; init; }

    /// <summary>
    /// Logical counter for events at same physical time.
    /// </summary>
    public required int LogicalCounter { get; init; }

    /// <summary>
    /// Gets the physical time as a <see cref="DateTimeOffset"/>.
    /// </summary>
    [JsonIgnore]
    public DateTimeOffset PhysicalDateTime =>
        DateTimeOffset.FromUnixTimeMilliseconds(PhysicalTime);

    /// <summary>
    /// Gets a zero/uninitialized timestamp (epoch, empty node, counter 0).
    /// </summary>
    public static HlcTimestamp Zero => new()
    {
        PhysicalTime = 0,
        NodeId = string.Empty,
        LogicalCounter = 0
    };

    /// <summary>
    /// String representation for storage: "1704067200000-scheduler-east-1-000042".
    /// Format: {PhysicalTime:D13}-{NodeId}-{LogicalCounter:D6}
    /// </summary>
    /// <returns>A sortable string representation.</returns>
    public string ToSortableString()
    {
        // D13 zero-pads the millisecond value to at least 13 digits (13 digits
        // of Unix milliseconds reach past year 2280), so lexicographic order of
        // equally-wide prefixes matches numeric order.
        return string.Create(
            CultureInfo.InvariantCulture,
            $"{PhysicalTime:D13}-{NodeId}-{LogicalCounter:D6}");
    }

    /// <summary>
    /// Parse from sortable string format.
    /// </summary>
    /// <param name="value">The sortable string to parse.</param>
    /// <returns>The parsed <see cref="HlcTimestamp"/>.</returns>
    /// <exception cref="ArgumentNullException">Thrown when value is null.</exception>
    /// <exception cref="FormatException">Thrown when value is not in valid format.</exception>
    public static HlcTimestamp Parse(string value)
    {
        ArgumentNullException.ThrowIfNull(value);
        if (!TryParse(value, out var result))
        {
            throw new FormatException($"Invalid HLC timestamp format: '{value}'");
        }
        return result;
    }

    /// <summary>
    /// Try to parse from sortable string format.
    /// </summary>
    /// <param name="value">The sortable string to parse.</param>
    /// <param name="result">The parsed timestamp if successful.</param>
    /// <returns>True if parsing succeeded; otherwise false.</returns>
    public static bool TryParse(
        [NotNullWhen(true)] string? value,
        out HlcTimestamp result)
    {
        result = default;
        if (string.IsNullOrEmpty(value))
        {
            return false;
        }
        // Format: {PhysicalTime:D13}-{NodeId}-{LogicalCounter:D6}
        // Example: 1704067200000-scheduler-east-1-000042
        // The NodeId can contain hyphens, so we parse from both ends
        var firstDash = value.IndexOf('-', StringComparison.Ordinal);
        // < 1 rejects both "no dash at all" and "dash at position 0"
        // (i.e. an empty physical-time segment).
        if (firstDash < 1)
        {
            return false;
        }
        var lastDash = value.LastIndexOf('-');
        // The counter segment must exist, be non-empty, and be distinct from
        // the physical-time separator.
        if (lastDash <= firstDash || lastDash >= value.Length - 1)
        {
            return false;
        }
        var physicalTimeStr = value[..firstDash];
        var nodeId = value[(firstDash + 1)..lastDash];
        var counterStr = value[(lastDash + 1)..];
        // NumberStyles.None rejects signs and whitespace; a negative
        // PhysicalTime therefore does not round-trip through this format.
        if (!long.TryParse(physicalTimeStr, NumberStyles.None, CultureInfo.InvariantCulture, out var physicalTime))
        {
            return false;
        }
        // Guards against adjacent dashes ("...--...") producing an empty node id.
        if (string.IsNullOrEmpty(nodeId))
        {
            return false;
        }
        if (!int.TryParse(counterStr, NumberStyles.None, CultureInfo.InvariantCulture, out var counter))
        {
            return false;
        }
        result = new HlcTimestamp
        {
            PhysicalTime = physicalTime,
            NodeId = nodeId,
            LogicalCounter = counter
        };
        return true;
    }

    /// <summary>
    /// Compare for total ordering.
    /// Order: (PhysicalTime, LogicalCounter, NodeId).
    /// </summary>
    /// <param name="other">The other timestamp to compare.</param>
    /// <returns>Comparison result.</returns>
    public int CompareTo(HlcTimestamp other)
    {
        // Primary: physical time
        var physicalCompare = PhysicalTime.CompareTo(other.PhysicalTime);
        if (physicalCompare != 0)
        {
            return physicalCompare;
        }
        // Secondary: logical counter
        var counterCompare = LogicalCounter.CompareTo(other.LogicalCounter);
        if (counterCompare != 0)
        {
            return counterCompare;
        }
        // Tertiary: node ID (for stable tie-breaking)
        return string.Compare(NodeId, other.NodeId, StringComparison.Ordinal);
    }

    /// <inheritdoc/>
    public int CompareTo(object? obj)
    {
        // IComparable convention: every value sorts after null.
        if (obj is null)
        {
            return 1;
        }
        if (obj is HlcTimestamp other)
        {
            return CompareTo(other);
        }
        throw new ArgumentException($"Object must be of type {nameof(HlcTimestamp)}", nameof(obj));
    }

    /// <summary>
    /// Less than operator.
    /// </summary>
    public static bool operator <(HlcTimestamp left, HlcTimestamp right) => left.CompareTo(right) < 0;

    /// <summary>
    /// Less than or equal operator.
    /// </summary>
    public static bool operator <=(HlcTimestamp left, HlcTimestamp right) => left.CompareTo(right) <= 0;

    /// <summary>
    /// Greater than operator.
    /// </summary>
    public static bool operator >(HlcTimestamp left, HlcTimestamp right) => left.CompareTo(right) > 0;

    /// <summary>
    /// Greater than or equal operator.
    /// </summary>
    public static bool operator >=(HlcTimestamp left, HlcTimestamp right) => left.CompareTo(right) >= 0;

    /// <inheritdoc/>
    public override string ToString() => ToSortableString();
}

View File

@@ -0,0 +1,60 @@
// <copyright file="HlcTimestampJsonConverter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// JSON converter for <see cref="HlcTimestamp"/> using the sortable string representation.
/// </summary>
/// <remarks>
/// <para>
/// Round-trips values through <see cref="HlcTimestamp.ToSortableString"/> and
/// <see cref="HlcTimestamp.TryParse"/>, e.g. "0001704067200000-scheduler-east-1-000042".
/// A JSON null deserializes to <see cref="HlcTimestamp.Zero"/> rather than throwing.
/// </para>
/// </remarks>
public sealed class HlcTimestampJsonConverter : JsonConverter<HlcTimestamp>
{
    /// <inheritdoc/>
    public override HlcTimestamp Read(
        ref Utf8JsonReader reader,
        Type typeToConvert,
        JsonSerializerOptions options)
    {
        switch (reader.TokenType)
        {
            case JsonTokenType.Null:
                // Tolerate null by mapping it to the zero timestamp.
                return HlcTimestamp.Zero;

            case JsonTokenType.String:
                var text = reader.GetString();
                return HlcTimestamp.TryParse(text, out var parsed)
                    ? parsed
                    : throw new JsonException($"Invalid HlcTimestamp format: '{text}'");

            default:
                throw new JsonException($"Expected string token for HlcTimestamp, got {reader.TokenType}");
        }
    }

    /// <inheritdoc/>
    public override void Write(
        Utf8JsonWriter writer,
        HlcTimestamp value,
        JsonSerializerOptions options)
    {
        ArgumentNullException.ThrowIfNull(writer);
        writer.WriteStringValue(value.ToSortableString());
    }
}

View File

@@ -0,0 +1,59 @@
// <copyright file="HlcTimestampTypeHandler.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Data;
using Dapper;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Dapper type handler mapping <see cref="HlcTimestamp"/> to and from a TEXT column
/// using the sortable string format.
/// </summary>
/// <remarks>
/// <para>
/// Register once at application startup:
/// <c>SqlMapper.AddTypeHandler(new HlcTimestampTypeHandler());</c> or simply
/// <c>HlcTimestampTypeHandler.Register();</c>.
/// </para>
/// </remarks>
public sealed class HlcTimestampTypeHandler : SqlMapper.TypeHandler<HlcTimestamp>
{
    /// <summary>
    /// Gets the singleton instance of the type handler.
    /// </summary>
    public static HlcTimestampTypeHandler Instance { get; } = new();

    /// <summary>
    /// Registers this type handler with Dapper.
    /// Should be called once at application startup.
    /// </summary>
    public static void Register() => SqlMapper.AddTypeHandler(Instance);

    /// <inheritdoc/>
    public override HlcTimestamp Parse(object value) => value switch
    {
        // NULL columns map to the zero timestamp rather than throwing.
        null or DBNull => HlcTimestamp.Zero,
        string text => HlcTimestamp.Parse(text),
        _ => throw new DataException($"Cannot convert {value.GetType().Name} to HlcTimestamp"),
    };

    /// <inheritdoc/>
    public override void SetValue(IDbDataParameter parameter, HlcTimestamp value)
    {
        ArgumentNullException.ThrowIfNull(parameter);
        parameter.DbType = DbType.String;
        parameter.Value = value.ToSortableString();
    }
}

View File

@@ -0,0 +1,272 @@
// <copyright file="HybridLogicalClock.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Default implementation of <see cref="IHybridLogicalClock"/>.
/// </summary>
/// <remarks>
/// <para>
/// Implements the Hybrid Logical Clock algorithm which combines physical time
/// with logical counters to provide:
/// </para>
/// <list type="bullet">
/// <item><description>Monotonicity: timestamps always increase</description></item>
/// <item><description>Causality: if A happens-before B, then HLC(A) &lt; HLC(B)</description></item>
/// <item><description>Bounded drift: physical component stays close to wall clock</description></item>
/// </list>
/// <para>
/// Thread-safety is guaranteed via internal locking. Persistence after each tick is
/// fire-and-forget: failures are logged and never block timestamp issuance.
/// </para>
/// </remarks>
public sealed class HybridLogicalClock : IHybridLogicalClock
{
    private readonly TimeProvider _timeProvider;
    private readonly IHlcStateStore _stateStore;
    private readonly TimeSpan _maxClockSkew;
    private readonly ILogger<HybridLogicalClock> _logger;
    private readonly object _lock = new();

    // Both fields are guarded by _lock; never read or write them outside it.
    private long _lastPhysicalTime;
    private int _logicalCounter;

    /// <summary>
    /// Initializes a new instance of the <see cref="HybridLogicalClock"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for physical clock.</param>
    /// <param name="nodeId">Unique identifier for this node.</param>
    /// <param name="stateStore">Persistent state store.</param>
    /// <param name="maxClockSkew">Maximum allowed clock skew (default: 1 minute).</param>
    /// <param name="logger">Optional logger.</param>
    public HybridLogicalClock(
        TimeProvider timeProvider,
        string nodeId,
        IHlcStateStore stateStore,
        TimeSpan? maxClockSkew = null,
        ILogger<HybridLogicalClock>? logger = null)
    {
        ArgumentNullException.ThrowIfNull(timeProvider);
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);
        ArgumentNullException.ThrowIfNull(stateStore);
        _timeProvider = timeProvider;
        NodeId = nodeId;
        _stateStore = stateStore;
        _maxClockSkew = maxClockSkew ?? TimeSpan.FromMinutes(1);
        _logger = logger ?? NullLogger<HybridLogicalClock>.Instance;
    }

    /// <inheritdoc/>
    public string NodeId { get; }

    /// <inheritdoc/>
    public HlcTimestamp Current
    {
        get
        {
            lock (_lock)
            {
                return new HlcTimestamp
                {
                    PhysicalTime = _lastPhysicalTime,
                    NodeId = NodeId,
                    LogicalCounter = _logicalCounter
                };
            }
        }
    }

    /// <inheritdoc/>
    public HlcTimestamp Tick()
    {
        HlcTimestamp timestamp;
        lock (_lock)
        {
            var physicalNow = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
            if (physicalNow > _lastPhysicalTime)
            {
                // Physical clock advanced - reset counter
                _lastPhysicalTime = physicalNow;
                _logicalCounter = 0;
            }
            else
            {
                // Same or earlier physical time - increment counter.
                // This handles clock regression gracefully.
                _logicalCounter++;

                // Check for counter overflow (unlikely but handle it)
                if (_logicalCounter < 0)
                {
                    _logger.LogWarning(
                        "HLC logical counter overflow detected, advancing physical time. NodeId={NodeId}",
                        NodeId);
                    _lastPhysicalTime++;
                    _logicalCounter = 0;
                }
            }

            timestamp = new HlcTimestamp
            {
                PhysicalTime = _lastPhysicalTime,
                NodeId = NodeId,
                LogicalCounter = _logicalCounter
            };
        }

        // Persist state asynchronously (fire-and-forget with error logging)
        _ = PersistStateAsync(timestamp);
        return timestamp;
    }

    /// <inheritdoc/>
    public HlcTimestamp Receive(HlcTimestamp remote)
    {
        HlcTimestamp timestamp;
        lock (_lock)
        {
            var physicalNow = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();

            // Reject remote timestamps that drifted too far from our wall clock.
            var skew = TimeSpan.FromMilliseconds(Math.Abs(remote.PhysicalTime - physicalNow));
            if (skew > _maxClockSkew)
            {
                _logger.LogError(
                    "Clock skew exceeded: observed={ObservedMs}ms, max={MaxMs}ms, remote={RemoteNodeId}",
                    skew.TotalMilliseconds,
                    _maxClockSkew.TotalMilliseconds,
                    remote.NodeId);
                throw new HlcClockSkewException(skew, _maxClockSkew);
            }

            var prevPhysicalTime = _lastPhysicalTime;
            var maxPhysical = Math.Max(Math.Max(prevPhysicalTime, remote.PhysicalTime), physicalNow);
            if (maxPhysical == prevPhysicalTime && maxPhysical == remote.PhysicalTime)
            {
                // All three equal - take max counter and increment
                _logicalCounter = Math.Max(_logicalCounter, remote.LogicalCounter) + 1;
            }
            else if (maxPhysical == prevPhysicalTime)
            {
                // Local was max - increment local counter
                _logicalCounter++;
            }
            else if (maxPhysical == remote.PhysicalTime)
            {
                // Remote was max - take remote counter and increment
                _logicalCounter = remote.LogicalCounter + 1;
            }
            else
            {
                // Physical clock advanced - reset counter
                _logicalCounter = 0;
            }

            _lastPhysicalTime = maxPhysical;

            // Check for counter overflow
            if (_logicalCounter < 0)
            {
                _logger.LogWarning(
                    "HLC logical counter overflow on receive, advancing physical time. NodeId={NodeId}",
                    NodeId);
                _lastPhysicalTime++;
                _logicalCounter = 0;
            }

            timestamp = new HlcTimestamp
            {
                PhysicalTime = _lastPhysicalTime,
                NodeId = NodeId,
                LogicalCounter = _logicalCounter
            };
        }

        // Persist state asynchronously
        _ = PersistStateAsync(timestamp);
        return timestamp;
    }

    /// <summary>
    /// Initialize clock state from persistent store.
    /// Should be called once during startup.
    /// </summary>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if state was recovered; false if starting fresh.</returns>
    public async Task<bool> InitializeAsync(CancellationToken ct = default)
    {
        var persisted = await _stateStore.LoadAsync(NodeId, ct).ConfigureAwait(false);

        // Snapshot the state set under the lock so logging below never performs an
        // unsynchronized (potentially torn or stale) read of the clock fields.
        long physicalSnapshot;
        int counterSnapshot;

        if (persisted is { } state)
        {
            lock (_lock)
            {
                // Ensure we never go backward relative to either the persisted
                // state or the current wall clock.
                var physicalNow = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
                _lastPhysicalTime = Math.Max(state.PhysicalTime, physicalNow);
                if (_lastPhysicalTime == state.PhysicalTime)
                {
                    // Same physical time - continue from persisted counter + 1
                    _logicalCounter = state.LogicalCounter + 1;
                }
                else
                {
                    // Physical time advanced - reset counter
                    _logicalCounter = 0;
                }

                physicalSnapshot = _lastPhysicalTime;
                counterSnapshot = _logicalCounter;
            }

            _logger.LogInformation(
                "HLC state recovered: PhysicalTime={PhysicalTime}, Counter={Counter}, NodeId={NodeId}",
                physicalSnapshot,
                counterSnapshot,
                NodeId);
            return true;
        }

        lock (_lock)
        {
            _lastPhysicalTime = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
            _logicalCounter = 0;
            physicalSnapshot = _lastPhysicalTime;
        }

        _logger.LogInformation(
            "HLC initialized fresh: PhysicalTime={PhysicalTime}, NodeId={NodeId}",
            physicalSnapshot,
            NodeId);
        return false;
    }

    /// <summary>
    /// Persists the given timestamp, logging (not throwing) on failure so the clock
    /// keeps operating; state will be recovered on the next successful save.
    /// </summary>
    private async Task PersistStateAsync(HlcTimestamp timestamp)
    {
        try
        {
            await _stateStore.SaveAsync(timestamp).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            // Fire-and-forget with error logging
            _logger.LogWarning(
                ex,
                "Failed to persist HLC state: NodeId={NodeId}, PhysicalTime={PhysicalTime}",
                NodeId,
                timestamp.PhysicalTime);
        }
    }
}

View File

@@ -0,0 +1,44 @@
// <copyright file="IHlcStateStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Persistent storage for HLC state (survives restarts).
/// </summary>
/// <remarks>
/// <para>
/// Implementations should provide atomic update semantics to prevent
/// state corruption during concurrent operations. The store is used to:
/// </para>
/// <list type="bullet">
/// <item><description>Persist HLC state after each tick (fire-and-forget)</description></item>
/// <item><description>Recover state on node restart</description></item>
/// <item><description>Ensure clock monotonicity across restarts</description></item>
/// </list>
/// <para>
/// The built-in implementations in this library keep the greater of the stored and
/// incoming timestamps on save, so concurrent saves cannot move the clock backward.
/// </para>
/// </remarks>
public interface IHlcStateStore
{
    /// <summary>
    /// Load last persisted HLC state for node.
    /// </summary>
    /// <param name="nodeId">The node identifier to load state for.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The last persisted timestamp, or null if no state exists for the node.</returns>
    Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default);
    /// <summary>
    /// Persist HLC state (called after each tick).
    /// </summary>
    /// <remarks>
    /// <para>
    /// This operation should be atomic and idempotent. Implementations may use
    /// fire-and-forget semantics with error logging for performance; callers such as
    /// <see cref="HybridLogicalClock"/> do not await this on the tick path.
    /// </para>
    /// </remarks>
    /// <param name="timestamp">The timestamp state to persist (includes the node id).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A task representing the async operation.</returns>
    Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default);
}

View File

@@ -0,0 +1,65 @@
// <copyright file="IHybridLogicalClock.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// Hybrid Logical Clock for monotonic timestamp generation.
/// </summary>
/// <remarks>
/// <para>
/// Implementations must guarantee:
/// </para>
/// <list type="number">
/// <item><description>Successive Tick() calls return strictly increasing timestamps</description></item>
/// <item><description>Receive() merges remote timestamp maintaining causality</description></item>
/// <item><description>Clock state survives restarts via persistence</description></item>
/// </list>
/// </remarks>
public interface IHybridLogicalClock
{
    /// <summary>
    /// Generate next timestamp for local event.
    /// </summary>
    /// <remarks>
    /// <para>Algorithm:</para>
    /// <list type="number">
    /// <item><description>l' = l (save previous logical time)</description></item>
    /// <item><description>l = max(l, physical_clock())</description></item>
    /// <item><description>if l == l': c = c + 1 else: c = 0</description></item>
    /// <item><description>return (l, node_id, c)</description></item>
    /// </list>
    /// </remarks>
    /// <returns>A new monotonically increasing timestamp.</returns>
    HlcTimestamp Tick();
    /// <summary>
    /// Update clock on receiving remote timestamp, return merged result.
    /// </summary>
    /// <remarks>
    /// <para>Algorithm:</para>
    /// <list type="number">
    /// <item><description>l' = l (save previous)</description></item>
    /// <item><description>l = max(l', m_l, physical_clock())</description></item>
    /// <item><description>Update c based on which max was chosen</description></item>
    /// <item><description>return (l, node_id, c)</description></item>
    /// </list>
    /// <para>The returned timestamp is guaranteed to order after <paramref name="remote"/>.</para>
    /// </remarks>
    /// <param name="remote">The remote timestamp to merge.</param>
    /// <returns>A new timestamp incorporating the remote causality.</returns>
    /// <exception cref="HlcClockSkewException">
    /// Thrown when the remote timestamp differs from physical clock by more than max skew tolerance.
    /// </exception>
    HlcTimestamp Receive(HlcTimestamp remote);
    /// <summary>
    /// Gets the current clock state (for persistence/recovery).
    /// Reading this property does not advance the clock.
    /// </summary>
    HlcTimestamp Current { get; }
    /// <summary>
    /// Gets the node identifier for this clock instance.
    /// </summary>
    string NodeId { get; }
}

View File

@@ -0,0 +1,54 @@
// <copyright file="InMemoryHlcStateStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Concurrent;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// In-memory <see cref="IHlcStateStore"/> for testing and development.
/// </summary>
/// <remarks>
/// <para>
/// All state lives in process memory and is lost on restart; use
/// <see cref="PostgresHlcStateStore"/> for production.
/// </para>
/// </remarks>
public sealed class InMemoryHlcStateStore : IHlcStateStore
{
    private readonly ConcurrentDictionary<string, HlcTimestamp> _store = new(StringComparer.Ordinal);

    /// <inheritdoc/>
    public Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(nodeId);
        HlcTimestamp? found = _store.TryGetValue(nodeId, out var existing) ? existing : null;
        return Task.FromResult(found);
    }

    /// <inheritdoc/>
    public Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default)
    {
        // Keep whichever timestamp is greater so concurrent saves can never
        // regress the persisted clock state.
        _store.AddOrUpdate(
            timestamp.NodeId,
            timestamp,
            (_, current) => timestamp > current ? timestamp : current);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Clear all stored state (for testing).
    /// </summary>
    public void Clear() => _store.Clear();

    /// <summary>
    /// Gets the count of stored entries (for testing).
    /// </summary>
    public int Count => _store.Count;
}

View File

@@ -0,0 +1,171 @@
// <copyright file="PostgresHlcStateStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Globalization;
using Dapper;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Npgsql;
namespace StellaOps.HybridLogicalClock;
/// <summary>
/// PostgreSQL implementation of <see cref="IHlcStateStore"/> with atomic update semantics.
/// </summary>
/// <remarks>
/// <para>
/// Requires the following table (created via migration or manually):
/// </para>
/// <code>
/// CREATE TABLE scheduler.hlc_state (
///     node_id TEXT PRIMARY KEY,
///     physical_time BIGINT NOT NULL,
///     logical_counter INT NOT NULL,
///     updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
/// );
/// </code>
/// <para>
/// A new <see cref="NpgsqlConnection"/> is opened per call; pooling is delegated to
/// Npgsql's built-in connection pool.
/// </para>
/// </remarks>
public sealed class PostgresHlcStateStore : IHlcStateStore
{
    private readonly string _connectionString;

    // The schema name is interpolated directly into SQL text below (schema names
    // cannot be parameterized). It is validated only as non-empty in the constructor,
    // so it must come from trusted configuration. NOTE(review): confirm callers never
    // pass user-supplied schema names here.
    private readonly string _schema;
    private readonly ILogger<PostgresHlcStateStore> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="PostgresHlcStateStore"/> class.
    /// </summary>
    /// <param name="connectionString">PostgreSQL connection string.</param>
    /// <param name="schema">Schema name (default: "scheduler").</param>
    /// <param name="logger">Optional logger.</param>
    public PostgresHlcStateStore(
        string connectionString,
        string schema = "scheduler",
        ILogger<PostgresHlcStateStore>? logger = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(connectionString);
        ArgumentException.ThrowIfNullOrWhiteSpace(schema);
        _connectionString = connectionString;
        _schema = schema;
        _logger = logger ?? NullLogger<PostgresHlcStateStore>.Instance;
    }

    /// <inheritdoc/>
    public async Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(nodeId);

        // node_id is passed as a proper SQL parameter; only the schema is interpolated.
        var sql = string.Create(
            CultureInfo.InvariantCulture,
            $"""
            SELECT physical_time, logical_counter
            FROM {_schema}.hlc_state
            WHERE node_id = @NodeId
            """);
        await using var connection = new NpgsqlConnection(_connectionString);
        await connection.OpenAsync(ct).ConfigureAwait(false);
        var result = await connection.QuerySingleOrDefaultAsync<HlcStateRow>(
            new CommandDefinition(
                sql,
                new { NodeId = nodeId },
                cancellationToken: ct)).ConfigureAwait(false);
        if (result is null)
        {
            // No persisted state yet for this node.
            return null;
        }

        return new HlcTimestamp
        {
            PhysicalTime = result.physical_time,
            NodeId = nodeId,
            LogicalCounter = result.logical_counter
        };
    }

    /// <inheritdoc/>
    public async Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default)
    {
        // Atomic upsert with monotonicity guarantee:
        // Only update if new values are greater than existing. The CASE keeps the
        // existing counter unless the incoming row wins on (physical_time, counter)
        // lexicographic order, mirroring HlcTimestamp.CompareTo.
        var sql = string.Create(
            CultureInfo.InvariantCulture,
            $"""
            INSERT INTO {_schema}.hlc_state (node_id, physical_time, logical_counter, updated_at)
            VALUES (@NodeId, @PhysicalTime, @LogicalCounter, NOW())
            ON CONFLICT (node_id) DO UPDATE
            SET physical_time = GREATEST({_schema}.hlc_state.physical_time, EXCLUDED.physical_time),
                logical_counter = CASE
                    WHEN EXCLUDED.physical_time > {_schema}.hlc_state.physical_time THEN EXCLUDED.logical_counter
                    WHEN EXCLUDED.physical_time = {_schema}.hlc_state.physical_time
                         AND EXCLUDED.logical_counter > {_schema}.hlc_state.logical_counter THEN EXCLUDED.logical_counter
                    ELSE {_schema}.hlc_state.logical_counter
                END,
                updated_at = NOW()
            """);
        await using var connection = new NpgsqlConnection(_connectionString);
        await connection.OpenAsync(ct).ConfigureAwait(false);
        try
        {
            await connection.ExecuteAsync(
                new CommandDefinition(
                    sql,
                    new
                    {
                        timestamp.NodeId,
                        timestamp.PhysicalTime,
                        timestamp.LogicalCounter
                    },
                    cancellationToken: ct)).ConfigureAwait(false);
        }
        catch (NpgsqlException ex)
        {
            // Log and rethrow; the caller (HybridLogicalClock) treats persistence
            // failures as non-fatal on its fire-and-forget path.
            _logger.LogWarning(
                ex,
                "Failed to save HLC state to PostgreSQL: NodeId={NodeId}, PhysicalTime={PhysicalTime}",
                timestamp.NodeId,
                timestamp.PhysicalTime);
            throw;
        }
    }

    /// <summary>
    /// Ensure the HLC state table exists (for development/testing).
    /// In production, use migrations.
    /// </summary>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A task representing the async operation.</returns>
    public async Task EnsureTableExistsAsync(CancellationToken ct = default)
    {
        // Idempotent DDL: safe to call repeatedly on startup.
        var sql = string.Create(
            CultureInfo.InvariantCulture,
            $"""
            CREATE SCHEMA IF NOT EXISTS {_schema};
            CREATE TABLE IF NOT EXISTS {_schema}.hlc_state (
                node_id TEXT PRIMARY KEY,
                physical_time BIGINT NOT NULL,
                logical_counter INT NOT NULL,
                updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
            );
            CREATE INDEX IF NOT EXISTS idx_hlc_state_updated
                ON {_schema}.hlc_state(updated_at DESC);
            """);
        await using var connection = new NpgsqlConnection(_connectionString);
        await connection.OpenAsync(ct).ConfigureAwait(false);
        await connection.ExecuteAsync(new CommandDefinition(sql, cancellationToken: ct)).ConfigureAwait(false);
        _logger.LogInformation("HLC state table ensured in schema {Schema}", _schema);
    }

    // Row projection for Dapper; field names intentionally match DB column names.
    #pragma warning disable IDE1006 // Naming Styles - matches DB column names
    private sealed record HlcStateRow(long physical_time, int logical_counter);
    #pragma warning restore IDE1006
}

View File

@@ -0,0 +1,320 @@
# StellaOps.HybridLogicalClock
A Hybrid Logical Clock (HLC) implementation for deterministic, monotonic job ordering across distributed nodes. HLC combines physical time with logical counters to provide causally-ordered timestamps even under clock skew.
## Overview
Traditional wall-clock timestamps are susceptible to clock skew across distributed nodes. HLC addresses this by combining:
- **Physical time**: Unix milliseconds UTC, advances with wall clock
- **Node ID**: Unique identifier for the generating node
- **Logical counter**: Increments when events occur at the same physical time
This provides:
- **Monotonicity**: Successive timestamps always increase
- **Causality**: If event A happens-before event B, then HLC(A) < HLC(B)
- **Bounded drift**: Physical component stays close to wall clock
## Installation
```csharp
// In your Startup.cs or Program.cs
services.AddHybridLogicalClock(options =>
{
options.NodeId = "scheduler-east-1";
options.MaxClockSkew = TimeSpan.FromMinutes(1);
options.PostgresConnectionString = configuration.GetConnectionString("Default");
});
```
## Quick Start
### Basic Usage
```csharp
public class JobScheduler
{
private readonly IHybridLogicalClock _clock;
public JobScheduler(IHybridLogicalClock clock)
{
_clock = clock;
}
public Job EnqueueJob(JobPayload payload)
{
// Generate monotonic timestamp for the job
var timestamp = _clock.Tick();
return new Job
{
Id = Guid.NewGuid(),
Timestamp = timestamp,
Payload = payload
};
}
}
```
### Receiving Remote Timestamps
When processing messages from other nodes:
```csharp
public void ProcessRemoteMessage(Message message)
{
// Merge remote timestamp to maintain causality
var localTimestamp = _clock.Receive(message.Timestamp);
// Now localTimestamp > message.Timestamp is guaranteed
ProcessPayload(message.Payload, localTimestamp);
}
```
### Initialization from Persistent State
During application startup, initialize the clock from persisted state:
```csharp
var host = builder.Build();
// Initialize HLC from persistent state before starting
await host.Services.InitializeHlcAsync();
await host.RunAsync();
```
## API Reference
### HlcTimestamp
A readonly record struct representing an HLC timestamp.
```csharp
public readonly record struct HlcTimestamp : IComparable<HlcTimestamp>
{
// Unix milliseconds UTC
public required long PhysicalTime { get; init; }
// Unique node identifier
public required string NodeId { get; init; }
// Logical counter for same-time events
public required int LogicalCounter { get; init; }
// Convert to sortable string: "0001704067200000-node-id-000042"
public string ToSortableString();
// Parse from sortable string
public static HlcTimestamp Parse(string value);
public static bool TryParse(string? value, out HlcTimestamp result);
// Get physical time as DateTimeOffset
public DateTimeOffset PhysicalDateTime { get; }
}
```
### IHybridLogicalClock
The main interface for HLC operations.
```csharp
public interface IHybridLogicalClock
{
// Generate next timestamp for local event
HlcTimestamp Tick();
// Merge with remote timestamp, return new local timestamp
HlcTimestamp Receive(HlcTimestamp remote);
// Current clock state
HlcTimestamp Current { get; }
// Node identifier
string NodeId { get; }
}
```
### IHlcStateStore
Interface for persisting clock state across restarts.
```csharp
public interface IHlcStateStore
{
Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default);
Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default);
}
```
Built-in implementations:
- `InMemoryHlcStateStore`: For testing (state lost on restart)
- `PostgresHlcStateStore`: Persists to PostgreSQL
## Configuration
### HlcOptions
| Property | Type | Default | Description |
|----------|------|---------|-------------|
| `NodeId` | string? | auto | Unique node identifier (e.g., "scheduler-east-1") |
| `MaxClockSkew` | TimeSpan | 1 minute | Maximum allowed difference from remote timestamps |
| `PostgresConnectionString` | string? | null | Connection string for PostgreSQL persistence |
| `PostgresSchema` | string | "scheduler" | PostgreSQL schema for HLC tables |
| `UseInMemoryStore` | bool | false | Force in-memory store (for testing) |
### Configuration via appsettings.json
```json
{
"HybridLogicalClock": {
"NodeId": "scheduler-east-1",
"MaxClockSkew": "00:01:00",
"PostgresConnectionString": "Host=localhost;Database=stellaops;Username=app",
"PostgresSchema": "scheduler"
}
}
```
## PostgreSQL Schema
Create the required table:
```sql
CREATE SCHEMA IF NOT EXISTS scheduler;
CREATE TABLE scheduler.hlc_state (
node_id TEXT PRIMARY KEY,
physical_time BIGINT NOT NULL,
logical_counter INT NOT NULL,
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX idx_hlc_state_updated ON scheduler.hlc_state(updated_at DESC);
```
## Serialization
### JSON (System.Text.Json)
HlcTimestamp includes a built-in JSON converter that serializes to the sortable string format:
```csharp
var timestamp = clock.Tick();
var json = JsonSerializer.Serialize(timestamp);
// Output: "0001704067200000-scheduler-east-1-000042"
var parsed = JsonSerializer.Deserialize<HlcTimestamp>(json);
```
### Dapper
Register the type handler for Dapper:
```csharp
HlcTimestampTypeHandler.Register();
// Now you can use HlcTimestamp in Dapper queries
var job = connection.QuerySingle<Job>(
"SELECT * FROM jobs WHERE timestamp > @Timestamp",
new { Timestamp = minTimestamp });
```
## Error Handling
### HlcClockSkewException
Thrown when a remote timestamp differs from local physical clock by more than `MaxClockSkew`:
```csharp
try
{
var localTs = clock.Receive(remoteTimestamp);
}
catch (HlcClockSkewException ex)
{
logger.LogError(
"Clock skew exceeded: observed {ObservedMs}ms, max {MaxMs}ms",
ex.ObservedSkew.TotalMilliseconds,
ex.MaxSkew.TotalMilliseconds);
// Reject the message or alert operations
}
```
## Testing
For unit tests, use FakeTimeProvider and InMemoryHlcStateStore:
```csharp
[Fact]
public void Tick_ReturnsMonotonicallyIncreasingTimestamps()
{
var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
var stateStore = new InMemoryHlcStateStore();
var clock = new HybridLogicalClock(timeProvider, "test-node", stateStore);
var t1 = clock.Tick();
var t2 = clock.Tick();
var t3 = clock.Tick();
Assert.True(t1 < t2);
Assert.True(t2 < t3);
}
```
## Performance
Benchmarks on typical hardware:
| Operation | Throughput | Allocation |
|-----------|------------|------------|
| Tick | ~5M ops/sec | 0 bytes |
| Receive | ~3M ops/sec | 0 bytes |
| ToSortableString | ~10M ops/sec | 80 bytes |
| Parse | ~5M ops/sec | 48 bytes |
Run benchmarks:
```bash
cd src/__Libraries/StellaOps.HybridLogicalClock.Benchmarks
dotnet run -c Release
```
## Algorithm
The HLC algorithm (Lamport + Physical Clock Hybrid):
**On local event or send (Tick):**
```
l' = l # save previous logical time
l = max(l, physical_clock()) # advance to at least physical time
if l == l':
c = c + 1 # same physical time, increment counter
else:
c = 0 # new physical time, reset counter
return (l, node_id, c)
```
**On receive (Receive):**
```
l' = l
l = max(l', m_l, physical_clock())
if l == l' == m_l:
c = max(c, m_c) + 1 # all equal, take max counter + 1
elif l == l':
c = c + 1 # local was max, increment local counter
elif l == m_l:
c = m_c + 1 # remote was max, take remote counter + 1
else:
c = 0 # physical clock advanced, reset
return (l, node_id, c)
```
## References
- [Logical Physical Clocks and Consistent Snapshots](https://cse.buffalo.edu/tech-reports/2014-04.pdf) - Original HLC paper
- [Time, Clocks, and the Ordering of Events](https://lamport.azurewebsites.net/pubs/time-clocks.pdf) - Lamport clocks
## License
AGPL-3.0-or-later

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<Description>Hybrid Logical Clock (HLC) implementation for deterministic, monotonic job ordering across distributed nodes.</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
<PackageReference Include="Microsoft.Extensions.Options.DataAnnotations" />
<PackageReference Include="Npgsql" />
<PackageReference Include="Dapper" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,323 @@
// <copyright file="ReplayProofTests.cs" company="Stella Operations">
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using StellaOps.Replay.Core.Models;
using Xunit;
namespace StellaOps.Replay.Core.Tests;
/// <summary>
/// Unit tests for ReplayProof model and compact string generation.
/// Sprint: SPRINT_20260105_002_001_REPLAY, Tasks RPL-011 through RPL-014.
/// </summary>
[Trait("Category", "Unit")]
public class ReplayProofTests
{
private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
[Fact]
public void FromExecutionResult_CreatesValidProof()
{
    // Arrange & Act
    var result = ReplayProof.FromExecutionResult(
        bundleHash: "sha256:abc123",
        policyVersion: "1.0.0",
        verdictRoot: "sha256:def456",
        verdictMatches: true,
        durationMs: 150,
        replayedAt: FixedTimestamp,
        engineVersion: "1.0.0",
        artifactDigest: "sha256:image123",
        signatureVerified: true,
        signatureKeyId: "key-001");

    // Assert - every argument surfaces unchanged on the resulting proof
    result.VerdictMatches.Should().BeTrue();
    result.SignatureVerified.Should().BeTrue();
    result.DurationMs.Should().Be(150);
    result.ReplayedAt.Should().Be(FixedTimestamp);
    result.BundleHash.Should().Be("sha256:abc123");
    result.PolicyVersion.Should().Be("1.0.0");
    result.VerdictRoot.Should().Be("sha256:def456");
    result.EngineVersion.Should().Be("1.0.0");
    result.ArtifactDigest.Should().Be("sha256:image123");
    result.SignatureKeyId.Should().Be("key-001");
}
[Fact]
public void ToCompactString_GeneratesCorrectFormat()
{
    // Arrange & Act
    var compact = CreateTestProof().ToCompactString();

    // Assert - prefix followed by a 64-character SHA-256 hex digest
    const string prefix = "replay-proof:";
    compact.Should().StartWith(prefix);
    compact.Should().HaveLength(prefix.Length + 64);
}
[Fact]
public void ToCompactString_IsDeterministic()
{
    // Arrange & Act - build two independent proofs from identical inputs
    var first = CreateTestProof().ToCompactString();
    var second = CreateTestProof().ToCompactString();

    // Assert
    second.Should().Be(first, "same inputs should produce same compact proof");
}
[Fact]
public void ToCanonicalJson_SortsKeysDeterministically()
{
    // Arrange & Act
    var canonicalJson = CreateTestProof().ToCanonicalJson();

    // Assert - canonical form requires keys in ordinal alphabetical order
    ExtractJsonKeys(canonicalJson).Should().BeInAscendingOrder(StringComparer.Ordinal);
}
[Fact]
public void ToCanonicalJson_ExcludesNullValues()
{
    // Arrange - optional signature/artifact arguments deliberately omitted
    var proof = ReplayProof.FromExecutionResult(
        bundleHash: "sha256:abc123",
        policyVersion: "1.0.0",
        verdictRoot: "sha256:def456",
        verdictMatches: true,
        durationMs: 150,
        replayedAt: FixedTimestamp,
        engineVersion: "1.0.0");

    // Act
    var json = proof.ToCanonicalJson();

    // Assert - unset fields are dropped entirely; no "null" literal may appear
    foreach (var forbidden in new[] { "null", "artifactDigest", "signatureVerified", "signatureKeyId" })
    {
        json.Should().NotContain(forbidden);
    }
}
[Fact]
public void ToCanonicalJson_FormatsTimestampCorrectly()
{
    // Arrange & Act
    var canonicalJson = CreateTestProof().ToCanonicalJson();

    // Assert - timestamps must be ISO 8601 UTC with millisecond precision
    canonicalJson.Should().Contain("2026-01-05T12:00:00.000Z");
}
[Fact]
public void ValidateCompactString_ReturnsTrueForValidProof()
{
    // Arrange - compact form and canonical JSON derived from the same proof
    var proof = CreateTestProof();

    // Act
    var isValid = ReplayProof.ValidateCompactString(proof.ToCompactString(), proof.ToCanonicalJson());

    // Assert
    isValid.Should().BeTrue();
}
[Fact]
public void ValidateCompactString_ReturnsFalseForTamperedJson()
{
    // Arrange: hash the genuine JSON, then mutate the JSON afterwards.
    var proof = CreateTestProof();
    var compact = proof.ToCompactString();
    var mutated = proof.ToCanonicalJson().Replace("1.0.0", "2.0.0");

    // Act & Assert: the stored hash must no longer match.
    ReplayProof.ValidateCompactString(compact, mutated)
        .Should().BeFalse("tampered JSON should not validate");
}
[Fact]
public void ValidateCompactString_ReturnsFalseForInvalidPrefix()
{
    // Arrange
    var json = CreateTestProof().ToCanonicalJson();

    // Act & Assert: anything not starting with "replay-proof:" is rejected.
    ReplayProof.ValidateCompactString("invalid-proof:abc123", json)
        .Should().BeFalse("invalid prefix should not validate");
}
[Fact]
public void ValidateCompactString_ReturnsFalseForEmptyInputs()
{
    // Act & Assert: a null/empty value on either side short-circuits to false.
    ReplayProof.ValidateCompactString("", "{}").Should().BeFalse();
    ReplayProof.ValidateCompactString(null!, "{}").Should().BeFalse();
    ReplayProof.ValidateCompactString("replay-proof:abc", "").Should().BeFalse();
    ReplayProof.ValidateCompactString("replay-proof:abc", null!).Should().BeFalse();
}
[Fact]
public void ToCanonicalJson_IncludesMetadataWhenPresent()
{
    // Arrange: attach two metadata entries to an otherwise-minimal proof.
    var metadata = ImmutableDictionary<string, string>.Empty
        .Add("tenant", "acme-corp")
        .Add("project", "web-app");
    var proof = ReplayProof.FromExecutionResult(
        bundleHash: "sha256:abc123",
        policyVersion: "1.0.0",
        verdictRoot: "sha256:def456",
        verdictMatches: true,
        durationMs: 150,
        replayedAt: FixedTimestamp,
        engineVersion: "1.0.0",
        metadata: metadata);

    // Act
    var json = proof.ToCanonicalJson();

    // Assert: the metadata object plus both keys and values survive serialization.
    json.Should().ContainAll("metadata", "tenant", "acme-corp", "project", "web-app");
}
[Fact]
public void ToCanonicalJson_SortsMetadataKeys()
{
    // Arrange: insert metadata keys in deliberately non-alphabetical order.
    var proof = ReplayProof.FromExecutionResult(
        bundleHash: "sha256:abc123",
        policyVersion: "1.0.0",
        verdictRoot: "sha256:def456",
        verdictMatches: true,
        durationMs: 150,
        replayedAt: FixedTimestamp,
        engineVersion: "1.0.0",
        metadata: ImmutableDictionary<string, string>.Empty
            .Add("zebra", "z-value")
            .Add("alpha", "a-value")
            .Add("mike", "m-value"));

    // Act
    var json = proof.ToCanonicalJson();

    // Assert: serialized positions follow alphabetical key order.
    json.IndexOf("alpha", StringComparison.Ordinal)
        .Should().BeLessThan(json.IndexOf("mike", StringComparison.Ordinal));
    json.IndexOf("mike", StringComparison.Ordinal)
        .Should().BeLessThan(json.IndexOf("zebra", StringComparison.Ordinal));
}
[Fact]
public void FromExecutionResult_ThrowsOnNullRequiredParams()
{
    // Arrange: deferred factory call with exactly one required argument nulled out.
    Action Build(string? bundleHash, string? policyVersion, string? verdictRoot, string? engineVersion) =>
        () => ReplayProof.FromExecutionResult(
            bundleHash: bundleHash!,
            policyVersion: policyVersion!,
            verdictRoot: verdictRoot!,
            verdictMatches: true,
            durationMs: 150,
            replayedAt: FixedTimestamp,
            engineVersion: engineVersion!);

    // Act & Assert: each required parameter is validated individually.
    Build(null, "1.0.0", "sha256:def456", "1.0.0")
        .Should().Throw<ArgumentNullException>().WithParameterName("bundleHash");
    Build("sha256:abc123", null, "sha256:def456", "1.0.0")
        .Should().Throw<ArgumentNullException>().WithParameterName("policyVersion");
    Build("sha256:abc123", "1.0.0", null, "1.0.0")
        .Should().Throw<ArgumentNullException>().WithParameterName("verdictRoot");
    Build("sha256:abc123", "1.0.0", "sha256:def456", null)
        .Should().Throw<ArgumentNullException>().WithParameterName("engineVersion");
}
[Fact]
public void SchemaVersion_DefaultsTo1_0_0()
{
    // A freshly-built proof carries the current default schema version.
    CreateTestProof().SchemaVersion.Should().Be("1.0.0");
}
/// <summary>
/// Builds the canonical fully-populated proof fixture shared by the tests above.
/// The literal values are arbitrary but must stay byte-stable: the determinism
/// and timestamp-format tests assert on the exact serialized output.
/// </summary>
private static ReplayProof CreateTestProof()
{
    return ReplayProof.FromExecutionResult(
        bundleHash: "sha256:abc123def456",
        policyVersion: "1.0.0",
        verdictRoot: "sha256:verdict789",
        verdictMatches: true,
        durationMs: 150,
        replayedAt: FixedTimestamp,
        engineVersion: "1.0.0",
        artifactDigest: "sha256:image123",
        signatureVerified: true,
        signatureKeyId: "key-001");
}
/// <summary>
/// Returns the top-level property names of a JSON object, in document order.
/// </summary>
private static List<string> ExtractJsonKeys(string json)
{
    using var document = JsonDocument.Parse(json);

    var names = new List<string>();
    foreach (var property in document.RootElement.EnumerateObject())
    {
        names.Add(property.Name);
    }

    return names;
}
}

View File

@@ -0,0 +1,204 @@
// <copyright file="ReplayProof.cs" company="Stella Operations">
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
// </copyright>
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Compact proof artifact for audit trails and ticket attachments.
/// Captures the essential evidence that a replay was performed and matched expectations.
/// </summary>
public sealed record ReplayProof
{
    /// <summary>
    /// Schema version for forward compatibility.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>
    /// SHA-256 of the replay bundle used.
    /// </summary>
    [JsonPropertyName("bundleHash")]
    public required string BundleHash { get; init; }

    /// <summary>
    /// Policy version used in the replay.
    /// </summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>
    /// Merkle root of all verdict outputs.
    /// </summary>
    [JsonPropertyName("verdictRoot")]
    public required string VerdictRoot { get; init; }

    /// <summary>
    /// Whether the replayed verdict matches the expected verdict.
    /// </summary>
    [JsonPropertyName("verdictMatches")]
    public required bool VerdictMatches { get; init; }

    /// <summary>
    /// Replay execution duration in milliseconds.
    /// </summary>
    [JsonPropertyName("durationMs")]
    public required long DurationMs { get; init; }

    /// <summary>
    /// UTC timestamp when replay was performed.
    /// </summary>
    [JsonPropertyName("replayedAt")]
    public required DateTimeOffset ReplayedAt { get; init; }

    /// <summary>
    /// Version of the replay engine used.
    /// </summary>
    [JsonPropertyName("engineVersion")]
    public required string EngineVersion { get; init; }

    /// <summary>
    /// Original artifact digest (image or SBOM) that was evaluated. Optional.
    /// </summary>
    [JsonPropertyName("artifactDigest")]
    public string? ArtifactDigest { get; init; }

    /// <summary>
    /// DSSE signature verified status (true/false/null if not present).
    /// </summary>
    [JsonPropertyName("signatureVerified")]
    public bool? SignatureVerified { get; init; }

    /// <summary>
    /// Key ID used for signature verification. Optional.
    /// </summary>
    [JsonPropertyName("signatureKeyId")]
    public string? SignatureKeyId { get; init; }

    /// <summary>
    /// Additional metadata (e.g., organization, project, tenant). Optional.
    /// </summary>
    [JsonPropertyName("metadata")]
    public ImmutableDictionary<string, string>? Metadata { get; init; }

    /// <summary>
    /// JSON serializer options for canonical serialization (no indentation).
    /// Key ordering is guaranteed by serializing a SortedDictionary, not by
    /// these options.
    /// </summary>
    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Converts the proof to a compact string format: "replay-proof:&lt;sha256&gt;".
    /// The hash is computed over the canonical JSON representation.
    /// </summary>
    /// <returns>Compact proof string suitable for ticket attachments.</returns>
    public string ToCompactString()
    {
        var canonicalJson = ToCanonicalJson();
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
        var hashHex = Convert.ToHexString(hashBytes).ToLowerInvariant();
        return $"replay-proof:{hashHex}";
    }

    /// <summary>
    /// Converts the proof to canonical JSON (RFC 8785 style: sorted keys, minimal whitespace).
    /// </summary>
    /// <returns>Canonical JSON string.</returns>
    public string ToCanonicalJson()
    {
        // SortedDictionary with an ordinal comparer guarantees key order.
        var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["artifactDigest"] = ArtifactDigest,
            ["bundleHash"] = BundleHash,
            ["durationMs"] = DurationMs,
            ["engineVersion"] = EngineVersion,
            ["metadata"] = Metadata is { Count: > 0 }
                ? new SortedDictionary<string, string>(Metadata, StringComparer.Ordinal)
                : null,
            ["policyVersion"] = PolicyVersion,
            ["replayedAt"] = ReplayedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.fffZ", System.Globalization.CultureInfo.InvariantCulture),
            ["schemaVersion"] = SchemaVersion,
            ["signatureKeyId"] = SignatureKeyId,
            ["signatureVerified"] = SignatureVerified,
            ["verdictMatches"] = VerdictMatches,
            ["verdictRoot"] = VerdictRoot,
        };

        // Drop null entries in place for the canonical form. Copying the
        // survivors into a plain Dictionary (as before) loses the ordering
        // guarantee: Dictionary enumeration order is unspecified, so canonical
        // output would rely on an implementation detail. Removing in place
        // keeps the SortedDictionary's ordinal ordering intact.
        foreach (var nullKey in ordered.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList())
        {
            ordered.Remove(nullKey);
        }

        return JsonSerializer.Serialize(ordered, CanonicalOptions);
    }

    /// <summary>
    /// Parses a compact proof string and validates its hash.
    /// </summary>
    /// <param name="compactString">The compact proof string (replay-proof:&lt;hash&gt;).</param>
    /// <param name="originalJson">The original canonical JSON to verify against.</param>
    /// <returns>True if the hash matches, false otherwise.</returns>
    public static bool ValidateCompactString(string compactString, string originalJson)
    {
        if (string.IsNullOrWhiteSpace(compactString) || string.IsNullOrWhiteSpace(originalJson))
        {
            return false;
        }

        const string prefix = "replay-proof:";
        if (!compactString.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        var expectedHash = compactString[prefix.Length..];
        var actualHashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(originalJson));
        var actualHash = Convert.ToHexString(actualHashBytes).ToLowerInvariant();
        return string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Creates a ReplayProof from execution results.
    /// </summary>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="bundleHash"/>, <paramref name="policyVersion"/>,
    /// <paramref name="verdictRoot"/> or <paramref name="engineVersion"/> is null.
    /// </exception>
    public static ReplayProof FromExecutionResult(
        string bundleHash,
        string policyVersion,
        string verdictRoot,
        bool verdictMatches,
        long durationMs,
        DateTimeOffset replayedAt,
        string engineVersion,
        string? artifactDigest = null,
        bool? signatureVerified = null,
        string? signatureKeyId = null,
        ImmutableDictionary<string, string>? metadata = null)
    {
        return new ReplayProof
        {
            BundleHash = bundleHash ?? throw new ArgumentNullException(nameof(bundleHash)),
            PolicyVersion = policyVersion ?? throw new ArgumentNullException(nameof(policyVersion)),
            VerdictRoot = verdictRoot ?? throw new ArgumentNullException(nameof(verdictRoot)),
            VerdictMatches = verdictMatches,
            DurationMs = durationMs,
            ReplayedAt = replayedAt,
            EngineVersion = engineVersion ?? throw new ArgumentNullException(nameof(engineVersion)),
            ArtifactDigest = artifactDigest,
            SignatureVerified = signatureVerified,
            SignatureKeyId = signatureKeyId,
            Metadata = metadata,
        };
    }
}

View File

@@ -0,0 +1,278 @@
// <copyright file="BlastRadiusTestRunner.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-002
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.TestKit.BlastRadius;
/// <summary>
/// Runs tests filtered by blast radius for incident response.
/// Builds xUnit trait filters, shells out to <c>dotnet test</c>, and parses
/// the console summary back into structured counts.
/// </summary>
public static class BlastRadiusTestRunner
{
    // Hoisted, compiled patterns: ParseSummary previously constructed a new
    // Regex object for every matching output line.
    private static readonly System.Text.RegularExpressions.Regex PassedPattern =
        new(@"Passed:\s*(\d+)", System.Text.RegularExpressions.RegexOptions.Compiled);

    private static readonly System.Text.RegularExpressions.Regex FailedPattern =
        new(@"Failed:\s*(\d+)", System.Text.RegularExpressions.RegexOptions.Compiled);

    private static readonly System.Text.RegularExpressions.Regex SkippedPattern =
        new(@"Skipped:\s*(\d+)", System.Text.RegularExpressions.RegexOptions.Compiled);

    private static readonly System.Text.RegularExpressions.Regex TotalPattern =
        new(@"Total:\s*(\d+)", System.Text.RegularExpressions.RegexOptions.Compiled);

    /// <summary>
    /// Get xUnit filter for specific blast radii.
    /// </summary>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <returns>xUnit filter string (clauses joined with '|').</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="blastRadii"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when no blast radii provided.</exception>
    public static string GetFilter(params string[] blastRadii)
    {
        ArgumentNullException.ThrowIfNull(blastRadii);
        if (blastRadii.Length == 0)
        {
            throw new ArgumentException("At least one blast radius required", nameof(blastRadii));
        }

        return string.Join("|", blastRadii.Select(br => $"BlastRadius={br}"));
    }

    /// <summary>
    /// Get xUnit filter for specific blast radii (IEnumerable overload).
    /// </summary>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <returns>xUnit filter string.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="blastRadii"/> is null.</exception>
    public static string GetFilter(IEnumerable<string> blastRadii)
    {
        ArgumentNullException.ThrowIfNull(blastRadii);
        return GetFilter(blastRadii.ToArray());
    }

    /// <summary>
    /// Get the dotnet test command for specific blast radii.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <param name="additionalArgs">Additional dotnet test arguments.</param>
    /// <returns>Complete dotnet test command.</returns>
    public static string GetCommand(
        string testProject,
        IEnumerable<string> blastRadii,
        string? additionalArgs = null)
    {
        var filter = GetFilter(blastRadii);
        var args = $"test {testProject} --filter \"{filter}\"";
        if (!string.IsNullOrWhiteSpace(additionalArgs))
        {
            args += $" {additionalArgs}";
        }

        return $"dotnet {args}";
    }

    /// <summary>
    /// Run tests for specific operational surfaces.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadii">Blast radii to run tests for.</param>
    /// <param name="workingDirectory">Working directory for test execution.</param>
    /// <param name="timeoutMs">Timeout in milliseconds (default 10 minutes).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Test run result; <c>TimedOut</c> is true and <c>ExitCode</c> is -1 when the run was killed.</returns>
    public static async Task<TestRunResult> RunForBlastRadiiAsync(
        string testProject,
        string[] blastRadii,
        string? workingDirectory = null,
        int timeoutMs = 600000,
        CancellationToken ct = default)
    {
        var filter = GetFilter(blastRadii);
        var startInfo = new ProcessStartInfo
        {
            FileName = "dotnet",
            Arguments = $"test {testProject} --filter \"{filter}\" --logger trx --verbosity normal",
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };
        if (!string.IsNullOrWhiteSpace(workingDirectory))
        {
            startInfo.WorkingDirectory = workingDirectory;
        }

        var stdout = new List<string>();
        var stderr = new List<string>();
        var sw = Stopwatch.StartNew();

        using var process = new Process { StartInfo = startInfo };
        process.OutputDataReceived += (_, e) =>
        {
            if (e.Data != null)
            {
                stdout.Add(e.Data);
            }
        };
        process.ErrorDataReceived += (_, e) =>
        {
            if (e.Data != null)
            {
                stderr.Add(e.Data);
            }
        };

        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        // Combine the caller's token with the run timeout.
        using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
        cts.CancelAfter(timeoutMs);
        try
        {
            await process.WaitForExitAsync(cts.Token);
        }
        catch (OperationCanceledException)
        {
            try
            {
                // Kill the whole tree: dotnet test spawns child test hosts.
                process.Kill(entireProcessTree: true);
            }
            catch
            {
                // Ignore kill errors; the process may already have exited.
            }

            return new TestRunResult(
                ExitCode: -1,
                BlastRadii: [.. blastRadii],
                Filter: filter,
                DurationMs: sw.ElapsedMilliseconds,
                Output: [.. stdout],
                Errors: [.. stderr],
                TimedOut: true);
        }

        sw.Stop();
        return new TestRunResult(
            ExitCode: process.ExitCode,
            BlastRadii: [.. blastRadii],
            Filter: filter,
            DurationMs: sw.ElapsedMilliseconds,
            Output: [.. stdout],
            Errors: [.. stderr],
            TimedOut: false);
    }

    /// <summary>
    /// Run tests for a single blast radius.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadius">Blast radius to run tests for.</param>
    /// <param name="workingDirectory">Working directory for test execution.</param>
    /// <param name="timeoutMs">Timeout in milliseconds.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Test run result.</returns>
    public static Task<TestRunResult> RunForBlastRadiusAsync(
        string testProject,
        string blastRadius,
        string? workingDirectory = null,
        int timeoutMs = 600000,
        CancellationToken ct = default)
    {
        return RunForBlastRadiiAsync(testProject, [blastRadius], workingDirectory, timeoutMs, ct);
    }

    /// <summary>
    /// Parse test results from dotnet test console output.
    /// </summary>
    /// <param name="result">Test run result.</param>
    /// <returns>Summary of test results. A count stays 0 when its line is
    /// absent; when a line repeats, the last occurrence wins.</returns>
    public static TestRunSummary ParseSummary(TestRunResult result)
    {
        var summary = new TestRunSummary(
            Passed: 0,
            Failed: 0,
            Skipped: 0,
            Total: 0);
        foreach (var line in result.Output)
        {
            // Parse dotnet test output format: "Passed: X" etc.
            if (TryExtractCount(line, "Passed:", PassedPattern, out var passed))
            {
                summary = summary with { Passed = passed };
            }

            if (TryExtractCount(line, "Failed:", FailedPattern, out var failed))
            {
                summary = summary with { Failed = failed };
            }

            if (TryExtractCount(line, "Skipped:", SkippedPattern, out var skipped))
            {
                summary = summary with { Skipped = skipped };
            }

            if (TryExtractCount(line, "Total:", TotalPattern, out var total))
            {
                summary = summary with { Total = total };
            }
        }

        return summary;
    }

    /// <summary>
    /// Extracts the integer count from a summary line when it contains the
    /// given label (case-insensitive pre-filter, matching the original
    /// behavior) and matches the case-sensitive pattern.
    /// </summary>
    private static bool TryExtractCount(
        string line,
        string label,
        System.Text.RegularExpressions.Regex pattern,
        out int count)
    {
        count = 0;
        if (!line.Contains(label, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        var match = pattern.Match(line);
        return match.Success && int.TryParse(match.Groups[1].Value, out count);
    }
}
/// <summary>
/// Result of running tests for blast radii.
/// </summary>
/// <param name="ExitCode">Process exit code (0 = success).</param>
/// <param name="BlastRadii">Blast radii that were tested.</param>
/// <param name="Filter">xUnit filter that was used.</param>
/// <param name="DurationMs">Duration of test run in milliseconds.</param>
/// <param name="Output">Standard output lines.</param>
/// <param name="Errors">Standard error lines.</param>
/// <param name="TimedOut">Whether the test run timed out.</param>
public sealed record TestRunResult(
    int ExitCode,
    ImmutableArray<string> BlastRadii,
    string Filter,
    long DurationMs,
    ImmutableArray<string> Output,
    ImmutableArray<string> Errors,
    bool TimedOut)
{
    /// <summary>
    /// Gets a value indicating whether the test run was successful:
    /// it ran to completion and exited with code 0.
    /// </summary>
    public bool IsSuccess => !TimedOut && ExitCode == 0;
}
/// <summary>
/// Summary of test run results.
/// </summary>
/// <param name="Passed">Number of passed tests.</param>
/// <param name="Failed">Number of failed tests.</param>
/// <param name="Skipped">Number of skipped tests.</param>
/// <param name="Total">Total number of tests.</param>
/// <remarks>
/// Populated by <c>BlastRadiusTestRunner.ParseSummary</c> from dotnet test
/// console output; a count remains 0 when its summary line is absent.
/// </remarks>
public sealed record TestRunSummary(
    int Passed,
    int Failed,
    int Skipped,
    int Total);

View File

@@ -0,0 +1,241 @@
// <copyright file="BlastRadiusValidator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-003
using System.Collections.Immutable;
using System.Reflection;
namespace StellaOps.TestKit.BlastRadius;
/// <summary>
/// Validates that tests have appropriate blast-radius annotations.
/// Traits are discovered reflectively by attribute name ("TraitAttribute"),
/// so no compile-time dependency on xUnit is required.
/// </summary>
public sealed class BlastRadiusValidator
{
    private readonly IReadOnlyList<Type> _testClasses;
    private readonly BlastRadiusValidationConfig _config;

    /// <summary>
    /// Initializes a new instance of the <see cref="BlastRadiusValidator"/> class.
    /// </summary>
    /// <param name="testClasses">Test classes to validate.</param>
    /// <param name="config">Validation configuration; defaults are used when null.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="testClasses"/> is null.</exception>
    public BlastRadiusValidator(
        IEnumerable<Type> testClasses,
        BlastRadiusValidationConfig? config = null)
    {
        ArgumentNullException.ThrowIfNull(testClasses);
        _testClasses = testClasses.ToList();
        _config = config ?? new BlastRadiusValidationConfig();
    }

    /// <summary>
    /// Create a validator from assemblies.
    /// </summary>
    /// <param name="assemblies">Assemblies to scan for test classes.</param>
    /// <param name="config">Validation configuration.</param>
    /// <returns>BlastRadiusValidator instance.</returns>
    public static BlastRadiusValidator FromAssemblies(
        IEnumerable<Assembly> assemblies,
        BlastRadiusValidationConfig? config = null)
    {
        var testClasses = assemblies
            .SelectMany(GetLoadableTypes)
            .Where(IsTestClass)
            .ToList();
        return new BlastRadiusValidator(testClasses, config);
    }

    /// <summary>
    /// Validate all tests that require blast-radius annotations.
    /// </summary>
    /// <returns>Validation result.</returns>
    public BlastRadiusValidationResult Validate()
    {
        var violations = new List<BlastRadiusViolation>();
        var requiringCount = 0;

        foreach (var testClass in _testClasses)
        {
            var traits = GetTraits(testClass).ToList();

            // Categories on this class that mandate a blast-radius annotation
            // (computed once; the original filtered the same list three times).
            var requiringCategories = traits
                .Where(t => t.Name == "Category" && _config.CategoriesRequiringBlastRadius.Contains(t.Value))
                .Select(t => t.Value)
                .ToList();

            if (requiringCategories.Count == 0)
            {
                continue;
            }

            requiringCount++;

            if (traits.Any(t => t.Name == "BlastRadius"))
            {
                continue;
            }

            var joined = string.Join(", ", requiringCategories);
            violations.Add(new BlastRadiusViolation(
                TestClass: testClass.FullName ?? testClass.Name,
                Category: joined,
                Message: $"Test class requires BlastRadius annotation because it has category: {joined}"));
        }

        return new BlastRadiusValidationResult(
            IsValid: violations.Count == 0,
            Violations: [.. violations],
            TotalTestClasses: _testClasses.Count,
            TestClassesRequiringBlastRadius: requiringCount);
    }

    /// <summary>
    /// Get coverage report by blast radius.
    /// </summary>
    /// <returns>Coverage report.</returns>
    public BlastRadiusCoverageReport GetCoverageReport()
    {
        var byBlastRadius = new Dictionary<string, List<string>>();
        var uncategorized = new List<string>();

        foreach (var testClass in _testClasses)
        {
            var blastRadii = GetTraits(testClass)
                .Where(t => t.Name == "BlastRadius")
                .Select(t => t.Value)
                .ToList();
            var className = testClass.FullName ?? testClass.Name;

            if (blastRadii.Count == 0)
            {
                uncategorized.Add(className);
                continue;
            }

            // A class may carry several BlastRadius traits; it is listed under each.
            foreach (var br in blastRadii)
            {
                if (!byBlastRadius.TryGetValue(br, out var list))
                {
                    list = [];
                    byBlastRadius[br] = list;
                }

                list.Add(className);
            }
        }

        return new BlastRadiusCoverageReport(
            ByBlastRadius: byBlastRadius.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray()),
            UncategorizedTestClasses: [.. uncategorized],
            TotalTestClasses: _testClasses.Count);
    }

    /// <summary>
    /// Get all blast radius values found in test classes.
    /// </summary>
    /// <returns>Distinct blast radius values in deterministic ordinal order.</returns>
    public IReadOnlyList<string> GetBlastRadiusValues()
    {
        return _testClasses
            .SelectMany(GetTraits)
            .Where(t => t.Name == "BlastRadius")
            .Select(t => t.Value)
            .Distinct()
            // Ordinal comparer: the default string comparer is culture-sensitive,
            // which would make the ordering vary with the host culture.
            .OrderBy(v => v, StringComparer.Ordinal)
            .ToList();
    }

    /// <summary>
    /// Returns the types defined in an assembly, tolerating partially
    /// loadable assemblies (the original threw on ReflectionTypeLoadException).
    /// </summary>
    private static IEnumerable<Type> GetLoadableTypes(Assembly assembly)
    {
        try
        {
            return assembly.GetTypes();
        }
        catch (ReflectionTypeLoadException ex)
        {
            // ex.Types contains null slots for types that failed to load.
            return ex.Types.OfType<Type>();
        }
    }

    /// <summary>
    /// A test class is a concrete class with at least one xUnit [Fact]/[Theory] method.
    /// </summary>
    private static bool IsTestClass(Type type)
    {
        if (!type.IsClass || type.IsAbstract)
        {
            return false;
        }

        return type.GetMethods()
            .Any(m => m.GetCustomAttributes()
                .Any(a => a.GetType().Name is "FactAttribute" or "TheoryAttribute"));
    }

    /// <summary>
    /// Reads (Name, Value) pairs from xUnit TraitAttribute instances on the
    /// class, matched by attribute name to avoid a hard xUnit reference.
    /// </summary>
    private static IEnumerable<(string Name, string Value)> GetTraits(Type type)
    {
        var traitAttributes = type.GetCustomAttributes()
            .Where(a => a.GetType().Name == "TraitAttribute")
            .ToList();
        foreach (var attr in traitAttributes)
        {
            var nameProperty = attr.GetType().GetProperty("Name");
            var valueProperty = attr.GetType().GetProperty("Value");
            if (nameProperty != null && valueProperty != null)
            {
                var name = nameProperty.GetValue(attr)?.ToString() ?? string.Empty;
                var value = valueProperty.GetValue(attr)?.ToString() ?? string.Empty;
                yield return (name, value);
            }
        }
    }
}
/// <summary>
/// Configuration for blast-radius validation.
/// </summary>
/// <param name="CategoriesRequiringBlastRadius">Categories that require blast-radius annotations.</param>
public sealed record BlastRadiusValidationConfig(
    ImmutableArray<string> CategoriesRequiringBlastRadius = default)
{
    /// <summary>
    /// Gets the categories requiring blast-radius annotations.
    /// </summary>
    /// <remarks>
    /// Deliberately shadows the positional parameter of the same name: when the
    /// caller omits the argument (or passes a default/empty array), the property
    /// falls back to Integration, Contract and Security. The parameter and
    /// property names must stay in sync for this substitution to work.
    /// </remarks>
    public ImmutableArray<string> CategoriesRequiringBlastRadius { get; init; } =
        CategoriesRequiringBlastRadius.IsDefaultOrEmpty
            ? [TestCategories.Integration, TestCategories.Contract, TestCategories.Security]
            : CategoriesRequiringBlastRadius;
}
/// <summary>
/// Result of blast-radius validation.
/// </summary>
/// <param name="IsValid">Whether all tests pass validation (no violations found).</param>
/// <param name="Violations">List of violations found.</param>
/// <param name="TotalTestClasses">Total number of test classes examined.</param>
/// <param name="TestClassesRequiringBlastRadius">Number of test classes that require blast-radius.</param>
public sealed record BlastRadiusValidationResult(
    bool IsValid,
    ImmutableArray<BlastRadiusViolation> Violations,
    int TotalTestClasses,
    int TestClassesRequiringBlastRadius);
/// <summary>
/// A blast-radius validation violation: a test class whose category mandates
/// a BlastRadius trait, but none is present.
/// </summary>
/// <param name="TestClass">Full name of the test class with the violation.</param>
/// <param name="Category">Category (or comma-joined categories) requiring blast-radius.</param>
/// <param name="Message">Human-readable violation message.</param>
public sealed record BlastRadiusViolation(
    string TestClass,
    string Category,
    string Message);
/// <summary>
/// Coverage report by blast radius.
/// </summary>
/// <param name="ByBlastRadius">Test class names grouped by blast radius; a class
/// with multiple BlastRadius traits appears under each.</param>
/// <param name="UncategorizedTestClasses">Test classes without any blast-radius annotation.</param>
/// <param name="TotalTestClasses">Total number of test classes scanned.</param>
public sealed record BlastRadiusCoverageReport(
    ImmutableDictionary<string, ImmutableArray<string>> ByBlastRadius,
    ImmutableArray<string> UncategorizedTestClasses,
    int TotalTestClasses);

View File

@@ -128,4 +128,94 @@ public static class TestCategories
/// Storage migration tests: Schema migrations, versioning, idempotent migration application.
/// </summary>
public const string StorageMigration = "StorageMigration";
// =========================================================================
// Blast-Radius annotations - operational surfaces affected by test failures
// Use these to enable targeted test runs during incidents
// =========================================================================
/// <summary>
/// Blast-radius annotations for operational surfaces.
/// </summary>
/// <remarks>
/// Usage with xUnit:
/// <code>
/// [Fact]
/// [Trait("Category", TestCategories.Integration)]
/// [Trait("BlastRadius", TestCategories.BlastRadius.Auth)]
/// [Trait("BlastRadius", TestCategories.BlastRadius.Api)]
/// public async Task TestTokenValidation() { }
/// </code>
///
/// Filter by blast radius during test runs:
/// <code>
/// dotnet test --filter "BlastRadius=Auth|BlastRadius=Api"
/// </code>
/// </remarks>
public static class BlastRadius
{
/// <summary>
/// Authentication, authorization, identity, tokens, sessions.
/// </summary>
public const string Auth = "Auth";
/// <summary>
/// SBOM generation, vulnerability scanning, reachability analysis.
/// </summary>
public const string Scanning = "Scanning";
/// <summary>
/// Attestation, evidence storage, audit trails, proof chains.
/// </summary>
public const string Evidence = "Evidence";
/// <summary>
/// Regulatory compliance, GDPR, data retention, audit logging.
/// </summary>
public const string Compliance = "Compliance";
/// <summary>
/// Advisory ingestion, VEX processing, feed synchronization.
/// </summary>
public const string Advisories = "Advisories";
/// <summary>
/// Risk scoring, policy evaluation, verdicts.
/// </summary>
public const string RiskPolicy = "RiskPolicy";
/// <summary>
/// Cryptographic operations, signing, verification, key management.
/// </summary>
public const string Crypto = "Crypto";
/// <summary>
/// External integrations, webhooks, notifications.
/// </summary>
public const string Integrations = "Integrations";
/// <summary>
/// Data persistence, database operations, storage.
/// </summary>
public const string Persistence = "Persistence";
/// <summary>
/// API surface, contract compatibility, endpoint behavior.
/// </summary>
public const string Api = "Api";
}
// =========================================================================
// Schema evolution categories
// =========================================================================
/// <summary>
/// Schema evolution tests: Backward/forward compatibility across schema versions.
/// </summary>
public const string SchemaEvolution = "SchemaEvolution";
/// <summary>
/// Config-diff tests: Behavioral delta tests for configuration changes.
/// </summary>
public const string ConfigDiff = "ConfigDiff";
}