Merge branch 'main' of https://git.stella-ops.org/stella-ops.org/git.stella-ops.org
627  src/__Libraries/StellaOps.Facet.Tests/FacetDriftDetectorTests.cs  Normal file
@@ -0,0 +1,627 @@
// <copyright file="FacetDriftDetectorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using Xunit;

namespace StellaOps.Facet.Tests;

/// <summary>
/// Tests for <see cref="FacetDriftDetector"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class FacetDriftDetectorTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly FacetDriftDetector _detector;

    public FacetDriftDetectorTests()
    {
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 6, 12, 0, 0, TimeSpan.Zero));
        _detector = new FacetDriftDetector(_timeProvider);
    }

    #region Helper Methods

    private static FacetSeal CreateBaseline(
        params FacetEntry[] facets)
    {
        return new FacetSeal
        {
            ImageDigest = "sha256:baseline123",
            CreatedAt = DateTimeOffset.UtcNow.AddDays(-1),
            Facets = [.. facets],
            CombinedMerkleRoot = "sha256:combined123"
        };
    }

    private static FacetSeal CreateBaselineWithQuotas(
        ImmutableDictionary<string, FacetQuota> quotas,
        params FacetEntry[] facets)
    {
        return new FacetSeal
        {
            ImageDigest = "sha256:baseline123",
            CreatedAt = DateTimeOffset.UtcNow.AddDays(-1),
            Facets = [.. facets],
            Quotas = quotas,
            CombinedMerkleRoot = "sha256:combined123"
        };
    }

    private static FacetSeal CreateCurrent(
        params FacetEntry[] facets)
    {
        return new FacetSeal
        {
            ImageDigest = "sha256:current456",
            CreatedAt = DateTimeOffset.UtcNow,
            Facets = [.. facets],
            CombinedMerkleRoot = "sha256:combined456"
        };
    }

    private static FacetEntry CreateFacetEntry(
        string facetId,
        string merkleRoot,
        int fileCount,
        ImmutableArray<FacetFileEntry>? files = null)
    {
        return new FacetEntry
        {
            FacetId = facetId,
            Name = facetId,
            Category = FacetCategory.OsPackages,
            Selectors = ["/var/lib/dpkg/**"],
            MerkleRoot = merkleRoot,
            FileCount = fileCount,
            TotalBytes = fileCount * 1024,
            Files = files
        };
    }

    private static FacetFileEntry CreateFile(string path, string digest, long size = 1024)
    {
        return new FacetFileEntry(path, digest, size, DateTimeOffset.UtcNow);
    }

    #endregion

    #region No Drift Tests

    [Fact]
    public async Task DetectDriftAsync_IdenticalSeals_ReturnsNoDrift()
    {
        // Arrange
        var files = ImmutableArray.Create(
            CreateFile("/etc/file1.conf", "sha256:aaa"),
            CreateFile("/etc/file2.conf", "sha256:bbb"));

        var facet = CreateFacetEntry("os-packages-dpkg", "sha256:root123", 2, files);
        var baseline = CreateBaseline(facet);
        var current = CreateCurrent(facet);

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.Should().NotBeNull();
        report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
        report.TotalChangedFiles.Should().Be(0);
        report.FacetDrifts.Should().HaveCount(1);
        report.FacetDrifts[0].HasDrift.Should().BeFalse();
    }

    [Fact]
    public async Task DetectDriftAsync_SameMerkleRoot_ReturnsNoDrift()
    {
        // Arrange - same root but files not provided = fast path
        var baseline = CreateBaseline(
            CreateFacetEntry("os-packages-dpkg", "sha256:sameroot", 10));
        var current = CreateCurrent(
            CreateFacetEntry("os-packages-dpkg", "sha256:sameroot", 10));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
        report.FacetDrifts[0].DriftScore.Should().Be(0);
    }

    #endregion

    #region File Addition Tests

    [Fact]
    public async Task DetectDriftAsync_FilesAdded_ReportsAdditions()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/app1", "sha256:aaa"));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/app1", "sha256:aaa"),
            CreateFile("/usr/bin/app2", "sha256:bbb"));

        var baseline = CreateBaseline(
            CreateFacetEntry("binaries-usr", "sha256:root1", 1, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("binaries-usr", "sha256:root2", 2, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.FacetDrifts.Should().HaveCount(1);
        var drift = report.FacetDrifts[0];
        drift.Added.Should().HaveCount(1);
        drift.Added[0].Path.Should().Be("/usr/bin/app2");
        drift.Removed.Should().BeEmpty();
        drift.Modified.Should().BeEmpty();
        drift.HasDrift.Should().BeTrue();
    }

    #endregion

    #region File Removal Tests

    [Fact]
    public async Task DetectDriftAsync_FilesRemoved_ReportsRemovals()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/app1", "sha256:aaa"),
            CreateFile("/usr/bin/app2", "sha256:bbb"));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/app1", "sha256:aaa"));

        var baseline = CreateBaseline(
            CreateFacetEntry("binaries-usr", "sha256:root1", 2, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("binaries-usr", "sha256:root2", 1, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.Removed.Should().HaveCount(1);
        drift.Removed[0].Path.Should().Be("/usr/bin/app2");
        drift.Added.Should().BeEmpty();
        drift.Modified.Should().BeEmpty();
    }

    #endregion

    #region File Modification Tests

    [Fact]
    public async Task DetectDriftAsync_FilesModified_ReportsModifications()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/etc/config.yaml", "sha256:oldhash", 512));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/etc/config.yaml", "sha256:newhash", 1024));

        var baseline = CreateBaseline(
            CreateFacetEntry("config-files", "sha256:root1", 1, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("config-files", "sha256:root2", 1, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.Modified.Should().HaveCount(1);
        drift.Modified[0].Path.Should().Be("/etc/config.yaml");
        drift.Modified[0].PreviousDigest.Should().Be("sha256:oldhash");
        drift.Modified[0].CurrentDigest.Should().Be("sha256:newhash");
        drift.Modified[0].PreviousSizeBytes.Should().Be(512);
        drift.Modified[0].CurrentSizeBytes.Should().Be(1024);
        drift.Added.Should().BeEmpty();
        drift.Removed.Should().BeEmpty();
    }

    #endregion

    #region Mixed Changes Tests

    [Fact]
    public async Task DetectDriftAsync_MixedChanges_ReportsAllTypes()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/keep", "sha256:keep"),
            CreateFile("/usr/bin/modify", "sha256:old"),
            CreateFile("/usr/bin/remove", "sha256:gone"));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/usr/bin/keep", "sha256:keep"),
            CreateFile("/usr/bin/modify", "sha256:new"),
            CreateFile("/usr/bin/add", "sha256:added"));

        var baseline = CreateBaseline(
            CreateFacetEntry("binaries", "sha256:root1", 3, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("binaries", "sha256:root2", 3, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.Added.Should().HaveCount(1);
        drift.Removed.Should().HaveCount(1);
        drift.Modified.Should().HaveCount(1);
        drift.TotalChanges.Should().Be(3);
    }

    #endregion

    #region Quota Enforcement Tests

    [Fact]
    public async Task DetectDriftAsync_WithinQuota_ReturnsOk()
    {
        // Arrange - 1 change out of 10 = 10% churn, quota is 15%
        var baselineFiles = Enumerable.Range(1, 10)
            .Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
            .ToImmutableArray();

        var currentFiles = baselineFiles
            .Take(9)
            .Append(CreateFile("/file10", "sha256:changed"))
            .ToImmutableArray();

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("test-facet", new FacetQuota { MaxChurnPercent = 15, MaxChangedFiles = 5 });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("test-facet", "sha256:root1", 10, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("test-facet", "sha256:root2", 10, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
    }
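
    // The churn numbers asserted in this region assume a formula along these lines;
    // the detector's implementation is not part of this diff, so treat this as a sketch:
    //
    //   decimal ChurnPercent(int added, int removed, int modified, int baselineCount) =>
    //       baselineCount == 0
    //           ? 100m
    //           : (added + removed + modified) / (decimal)baselineCount * 100m;
    //
    // e.g. the test above modifies 1 of 10 baseline files: 1 / 10 * 100 = 10%, under the 15% quota.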

    [Fact]
    public async Task DetectDriftAsync_ExceedsChurnPercent_ReturnsWarning()
    {
        // Arrange - files 8-10 are replaced by files 11-13 (3 removed + 3 added),
        // which is well over the 10% churn quota
        var baselineFiles = Enumerable.Range(1, 10)
            .Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
            .ToImmutableArray();

        var currentFiles = baselineFiles
            .Take(7)
            .Concat(Enumerable.Range(11, 3).Select(i => CreateFile($"/file{i}", $"sha256:new{i}")))
            .ToImmutableArray();

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("test-facet", new FacetQuota
            {
                MaxChurnPercent = 10,
                MaxChangedFiles = 100,
                Action = QuotaExceededAction.Warn
            });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("test-facet", "sha256:root1", 10, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("test-facet", "sha256:root2", 10, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Warning);
    }

    [Fact]
    public async Task DetectDriftAsync_ExceedsMaxFiles_WithBlockAction_ReturnsBlocked()
    {
        // Arrange - files 95-100 are replaced by files 101-106 (12 changed files),
        // exceeding the 5-file quota with block action
        var baselineFiles = Enumerable.Range(1, 100)
            .Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
            .ToImmutableArray();

        var currentFiles = baselineFiles
            .Take(94)
            .Concat(Enumerable.Range(101, 6).Select(i => CreateFile($"/file{i}", $"sha256:new{i}")))
            .ToImmutableArray();

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("binaries", new FacetQuota
            {
                MaxChurnPercent = 100,
                MaxChangedFiles = 5,
                Action = QuotaExceededAction.Block
            });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("binaries", "sha256:root1", 100, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("binaries", "sha256:root2", 100, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Blocked);
        report.FacetDrifts[0].QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
    }
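
    // A minimal sketch of how a per-facet quota check could map onto verdicts, assuming
    // the FacetQuota shape used in these tests (the real detector may differ):
    //
    //   QuotaVerdict Evaluate(FacetQuota quota, int changedFiles, decimal churnPercent)
    //   {
    //       var exceeded = changedFiles > quota.MaxChangedFiles
    //           || churnPercent > quota.MaxChurnPercent;
    //       if (!exceeded)
    //       {
    //           return QuotaVerdict.Ok;
    //       }
    //
    //       return quota.Action switch
    //       {
    //           QuotaExceededAction.Warn => QuotaVerdict.Warning,
    //           QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
    //           QuotaExceededAction.Block => QuotaVerdict.Blocked,
    //           _ => QuotaVerdict.Warning
    //       };
    //   }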

    [Fact]
    public async Task DetectDriftAsync_ExceedsQuota_WithRequireVex_ReturnsRequiresVex()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/deps/package.json", "sha256:old"));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/deps/package.json", "sha256:new"),
            CreateFile("/deps/package-lock.json", "sha256:lock"));

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("lang-deps", new FacetQuota
            {
                MaxChurnPercent = 50,
                MaxChangedFiles = 1,
                Action = QuotaExceededAction.RequireVex
            });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("lang-deps", "sha256:root1", 1, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("lang-deps", "sha256:root2", 2, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.RequiresVex);
    }

    #endregion

    #region Allowlist Tests

    [Fact]
    public async Task DetectDriftAsync_AllowlistedFiles_AreExcludedFromDrift()
    {
        // Arrange - changes to allowlisted paths should be ignored
        var baselineFiles = ImmutableArray.Create(
            CreateFile("/var/lib/dpkg/status", "sha256:old"),
            CreateFile("/usr/bin/app", "sha256:app"));

        var currentFiles = ImmutableArray.Create(
            CreateFile("/var/lib/dpkg/status", "sha256:new"), // Allowlisted
            CreateFile("/usr/bin/app", "sha256:app"));

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("os-packages", new FacetQuota
            {
                MaxChurnPercent = 0,
                MaxChangedFiles = 0,
                Action = QuotaExceededAction.Block,
                AllowlistGlobs = ["/var/lib/dpkg/**"]
            });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("os-packages", "sha256:root1", 2, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("os-packages", "sha256:root2", 2, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Ok);
        report.FacetDrifts[0].Modified.Should().BeEmpty();
    }

    #endregion

    #region Multi-Facet Tests

    [Fact]
    public async Task DetectDriftAsync_MultipleFacets_ReturnsWorstVerdict()
    {
        // Arrange - one facet OK, one blocked
        var okFiles = ImmutableArray.Create(CreateFile("/ok/file", "sha256:same"));
        var blockFiles = ImmutableArray.Create(
            CreateFile("/block/file1", "sha256:old1"),
            CreateFile("/block/file2", "sha256:old2"));
        var blockCurrentFiles = ImmutableArray.Create(
            CreateFile("/block/file1", "sha256:new1"),
            CreateFile("/block/file2", "sha256:new2"));

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("ok-facet", FacetQuota.Default)
            .Add("block-facet", new FacetQuota
            {
                MaxChurnPercent = 0,
                Action = QuotaExceededAction.Block
            });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("ok-facet", "sha256:ok1", 1, okFiles),
            CreateFacetEntry("block-facet", "sha256:block1", 2, blockFiles));

        var current = CreateCurrent(
            CreateFacetEntry("ok-facet", "sha256:ok1", 1, okFiles),
            CreateFacetEntry("block-facet", "sha256:block2", 2, blockCurrentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.OverallVerdict.Should().Be(QuotaVerdict.Blocked);
        report.FacetDrifts.Should().HaveCount(2);
        report.FacetDrifts.First(d => d.FacetId == "ok-facet").QuotaVerdict.Should().Be(QuotaVerdict.Ok);
        report.FacetDrifts.First(d => d.FacetId == "block-facet").QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
    }
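
    // The "worst verdict wins" aggregation asserted above falls out naturally if the
    // QuotaVerdict enum is ordered by severity (Ok < Warning < RequiresVex < Blocked);
    // that ordering is inferred from the tests, not confirmed by this diff:
    //
    //   QuotaVerdict CombineVerdicts(IEnumerable<QuotaVerdict> verdicts) =>
    //       verdicts.DefaultIfEmpty(QuotaVerdict.Ok).Max();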

    [Fact]
    public async Task DetectDriftAsync_NewFacetAppears_ReportsAsWarning()
    {
        // Arrange
        var baselineFiles = ImmutableArray.Create(CreateFile("/old/file", "sha256:old"));
        var newFacetFiles = ImmutableArray.Create(CreateFile("/new/file", "sha256:new"));

        var baseline = CreateBaseline(
            CreateFacetEntry("existing-facet", "sha256:root1", 1, baselineFiles));

        var current = CreateCurrent(
            CreateFacetEntry("existing-facet", "sha256:root1", 1, baselineFiles),
            CreateFacetEntry("new-facet", "sha256:root2", 1, newFacetFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.FacetDrifts.Should().HaveCount(2);
        var newDrift = report.FacetDrifts.First(d => d.FacetId == "new-facet");
        newDrift.QuotaVerdict.Should().Be(QuotaVerdict.Warning);
        newDrift.Added.Should().HaveCount(1);
        newDrift.BaselineFileCount.Should().Be(0);
    }

    [Fact]
    public async Task DetectDriftAsync_FacetRemoved_ReportsAsWarningOrBlock()
    {
        // Arrange
        var removedFacetFiles = ImmutableArray.Create(
            CreateFile("/removed/file1", "sha256:gone1"),
            CreateFile("/removed/file2", "sha256:gone2"));

        var quotas = ImmutableDictionary<string, FacetQuota>.Empty
            .Add("removed-facet", new FacetQuota { Action = QuotaExceededAction.Block });

        var baseline = CreateBaselineWithQuotas(quotas,
            CreateFacetEntry("removed-facet", "sha256:root1", 2, removedFacetFiles));

        var current = CreateCurrent(); // No facets

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        report.FacetDrifts.Should().HaveCount(1);
        var drift = report.FacetDrifts[0];
        drift.FacetId.Should().Be("removed-facet");
        drift.Removed.Should().HaveCount(2);
        drift.Added.Should().BeEmpty();
        drift.QuotaVerdict.Should().Be(QuotaVerdict.Blocked);
    }

    #endregion

    #region Drift Score Tests

    [Fact]
    public async Task DetectDriftAsync_CalculatesDriftScore_BasedOnChanges()
    {
        // Arrange - 2 additions, 1 removal, 1 modification out of 10 files
        // Weighted: (2 + 1 + 0.5) / 10 * 100 = 35%
        var baselineFiles = Enumerable.Range(1, 10)
            .Select(i => CreateFile($"/file{i}", $"sha256:hash{i}"))
            .ToImmutableArray();

        var currentFiles = baselineFiles
            .Skip(1) // Remove file1
            .Take(8)
            .Append(CreateFile("/file10", "sha256:modified")) // Modify file10
            .Append(CreateFile("/file11", "sha256:new1")) // Add 2 files
            .Append(CreateFile("/file12", "sha256:new2"))
            .ToImmutableArray();

        var baseline = CreateBaseline(
            CreateFacetEntry("test", "sha256:root1", 10, baselineFiles));
        var current = CreateCurrent(
            CreateFacetEntry("test", "sha256:root2", 11, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.DriftScore.Should().BeGreaterThan(0);
        drift.DriftScore.Should().BeLessThanOrEqualTo(100);
        drift.ChurnPercent.Should().BeGreaterThan(0);
    }
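
    // Sketch of the weighting the arrange comment implies (additions and removals weigh 1,
    // modifications weigh 0.5, normalized by baseline size and capped at 100). This is
    // inferred from the test above, not taken from the detector source:
    //
    //   decimal DriftScore(int added, int removed, int modified, int baselineCount) =>
    //       baselineCount == 0
    //           ? 100m
    //           : Math.Min(100m, (added + removed + 0.5m * modified) / baselineCount * 100m);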

    #endregion

    #region Edge Cases

    [Fact]
    public async Task DetectDriftAsync_EmptyBaseline_AllFilesAreAdditions()
    {
        // Arrange
        var currentFiles = ImmutableArray.Create(
            CreateFile("/new/file1", "sha256:new1"),
            CreateFile("/new/file2", "sha256:new2"));

        var baseline = CreateBaseline(
            CreateFacetEntry("empty-facet", "sha256:empty", 0, []));
        var current = CreateCurrent(
            CreateFacetEntry("empty-facet", "sha256:root", 2, currentFiles));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.Added.Should().HaveCount(2);
        drift.ChurnPercent.Should().Be(100m); // All new = 100% churn
    }

    [Fact]
    public async Task DetectDriftAsync_NullFilesInBaseline_FallsBackToRootComparison()
    {
        // Arrange - no file details, different roots
        var baseline = CreateBaseline(
            CreateFacetEntry("no-files", "sha256:root1", 10, null));
        var current = CreateCurrent(
            CreateFacetEntry("no-files", "sha256:root2", 10, null));

        // Act
        var report = await _detector.DetectDriftAsync(baseline, current, TestContext.Current.CancellationToken);

        // Assert
        var drift = report.FacetDrifts[0];
        drift.DriftScore.Should().Be(100m); // Max drift when details cannot be computed
    }

    [Fact]
    public async Task DetectDriftAsync_Cancellation_ThrowsOperationCanceled()
    {
        // Arrange
        var baseline = CreateBaseline(
            CreateFacetEntry("test", "sha256:root1", 10));
        var current = CreateCurrent(
            CreateFacetEntry("test", "sha256:root2", 10));

        var cts = new CancellationTokenSource();
        cts.Cancel();

        // Act & Assert
        await Assert.ThrowsAsync<OperationCanceledException>(
            () => _detector.DetectDriftAsync(baseline, current, cts.Token));
    }

    #endregion
}
437  src/__Libraries/StellaOps.Facet.Tests/FacetDriftVexEmitterTests.cs  Normal file
@@ -0,0 +1,437 @@
// <copyright file="FacetDriftVexEmitterTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-020)

using System.Collections.Immutable;
using Microsoft.Extensions.Time.Testing;
using Xunit;

namespace StellaOps.Facet.Tests;

/// <summary>
/// Unit tests for <see cref="FacetDriftVexEmitter"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class FacetDriftVexEmitterTests
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly FacetDriftVexEmitter _emitter;
    private readonly FacetDriftVexEmitterOptions _options;

    public FacetDriftVexEmitterTests()
    {
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 7, 12, 0, 0, TimeSpan.Zero));
        _options = FacetDriftVexEmitterOptions.Default;
        _emitter = new FacetDriftVexEmitter(_options, _timeProvider);
    }

    [Fact]
    public void EmitDrafts_WithNoRequiresVexFacets_ReturnsEmptyResult()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.Ok);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(0, result.DraftsEmitted);
        Assert.Empty(result.Drafts);
    }

    [Fact]
    public void EmitDrafts_WithRequiresVexFacet_CreatesDraft()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.RequiresVex);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(1, result.DraftsEmitted);
        Assert.Single(result.Drafts);
    }

    [Fact]
    public void EmitDrafts_DraftContainsCorrectImageDigest()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.RequiresVex, imageDigest: "sha256:abc123");
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal("sha256:abc123", result.ImageDigest);
        Assert.Equal("sha256:abc123", result.Drafts[0].ImageDigest);
    }

    [Fact]
    public void EmitDrafts_DraftContainsBaselineSealId()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.RequiresVex, baselineSealId: "seal-xyz");
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal("seal-xyz", result.BaselineSealId);
        Assert.Equal("seal-xyz", result.Drafts[0].BaselineSealId);
    }

    [Fact]
    public void EmitDrafts_DraftHasDeterministicId()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.RequiresVex);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result1 = _emitter.EmitDrafts(context);
        var result2 = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(result1.Drafts[0].DraftId, result2.Drafts[0].DraftId);
        Assert.StartsWith("vexfd-", result1.Drafts[0].DraftId);
    }
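
    // A deterministic, replay-stable draft ID is typically derived by hashing the
    // identifying inputs. The "vexfd-" prefix is asserted above; the derivation below
    // (SHA-256 over image digest, seal id, and facet id) is an illustrative assumption:
    //
    //   string DraftId(string imageDigest, string sealId, string facetId)
    //   {
    //       var bytes = System.Security.Cryptography.SHA256.HashData(
    //           System.Text.Encoding.UTF8.GetBytes($"{imageDigest}|{sealId}|{facetId}"));
    //       return "vexfd-" + Convert.ToHexString(bytes)[..16].ToLowerInvariant();
    //   }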

    [Fact]
    public void EmitDrafts_DraftIdsDifferForDifferentFacets()
    {
        // Arrange
        var facetDrifts = new[]
        {
            CreateFacetDrift("facet-a", QuotaVerdict.RequiresVex),
            CreateFacetDrift("facet-b", QuotaVerdict.RequiresVex)
        };
        var report = new FacetDriftReport
        {
            ImageDigest = "sha256:abc123",
            BaselineSealId = "seal-123",
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [.. facetDrifts],
            OverallVerdict = QuotaVerdict.RequiresVex
        };
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(2, result.DraftsEmitted);
        Assert.NotEqual(result.Drafts[0].DraftId, result.Drafts[1].DraftId);
    }

    [Fact]
    public void EmitDrafts_DraftContainsChurnInformation()
    {
        // Arrange
        var report = CreateDriftReportWithChurn(25m);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        var summary = result.Drafts[0].DriftSummary;
        Assert.Equal(25m, summary.ChurnPercent);
        Assert.Equal(100, summary.BaselineFileCount);
    }

    [Fact]
    public void EmitDrafts_DraftHasCorrectExpirationTime()
    {
        // Arrange
        var options = new FacetDriftVexEmitterOptions { DraftTtl = TimeSpan.FromDays(14) };
        var emitter = new FacetDriftVexEmitter(options, _timeProvider);
        var report = CreateDriftReport(QuotaVerdict.RequiresVex);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = emitter.EmitDrafts(context);

        // Assert
        var expectedExpiry = _timeProvider.GetUtcNow().AddDays(14);
        Assert.Equal(expectedExpiry, result.Drafts[0].ExpiresAt);
    }

    [Fact]
    public void EmitDrafts_DraftHasCorrectReviewDeadline()
    {
        // Arrange
        var options = new FacetDriftVexEmitterOptions { ReviewSlaDays = 5 };
        var emitter = new FacetDriftVexEmitter(options, _timeProvider);
        var report = CreateDriftReport(QuotaVerdict.RequiresVex);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = emitter.EmitDrafts(context);

        // Assert
        var expectedDeadline = _timeProvider.GetUtcNow().AddDays(5);
        Assert.Equal(expectedDeadline, result.Drafts[0].ReviewDeadline);
    }

    [Fact]
    public void EmitDrafts_DraftRequiresReview()
    {
        // Arrange
        var report = CreateDriftReport(QuotaVerdict.RequiresVex);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.True(result.Drafts[0].RequiresReview);
    }

    [Fact]
    public void EmitDrafts_DraftHasEvidenceLinks()
    {
        // Arrange
        var report = CreateDriftReportWithChanges(added: 5, removed: 3, modified: 2);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        var links = result.Drafts[0].EvidenceLinks;
        Assert.Contains(links, l => l.Type == "facet_drift_analysis");
        Assert.Contains(links, l => l.Type == "baseline_seal");
        Assert.Contains(links, l => l.Type == "added_files");
        Assert.Contains(links, l => l.Type == "removed_files");
        Assert.Contains(links, l => l.Type == "modified_files");
    }

    [Fact]
    public void EmitDrafts_RationaleDescribesChurn()
    {
        // Arrange - 15 files added out of 100 baseline = 15.0% churn
        var report = CreateDriftReportWithChurn(15m);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        var rationale = result.Drafts[0].Rationale;
        Assert.Contains("15.0%", rationale);
        Assert.Contains("quota", rationale, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void EmitDrafts_HighChurnTriggersWarningInNotes()
    {
        // Arrange
        var options = new FacetDriftVexEmitterOptions { HighChurnThreshold = 20m };
        var emitter = new FacetDriftVexEmitter(options, _timeProvider);
        var report = CreateDriftReportWithChurn(35m);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = emitter.EmitDrafts(context);

        // Assert
        var notes = result.Drafts[0].ReviewerNotes;
        Assert.NotNull(notes);
        Assert.Contains("WARNING", notes);
        Assert.Contains("High churn", notes);
    }

    [Fact]
    public void EmitDrafts_RemovedFilesTriggersNoteInReviewerNotes()
    {
        // Arrange
        var report = CreateDriftReportWithChanges(added: 0, removed: 5, modified: 0);
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        var notes = result.Drafts[0].ReviewerNotes;
        Assert.NotNull(notes);
        Assert.Contains("removed", notes, StringComparison.OrdinalIgnoreCase);
    }

    [Fact]
    public void EmitDrafts_RespectsMaxDraftsLimit()
    {
        // Arrange
        var options = new FacetDriftVexEmitterOptions { MaxDraftsPerBatch = 2 };
        var emitter = new FacetDriftVexEmitter(options, _timeProvider);

        var facetDrifts = Enumerable.Range(0, 5)
            .Select(i => CreateFacetDrift($"facet-{i}", QuotaVerdict.RequiresVex))
            .ToImmutableArray();

        var report = new FacetDriftReport
        {
            ImageDigest = "sha256:abc123",
            BaselineSealId = "seal-123",
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = facetDrifts,
            OverallVerdict = QuotaVerdict.RequiresVex
        };
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(2, result.DraftsEmitted);
        Assert.Equal(2, result.Drafts.Length);
    }

    [Fact]
    public void EmitDrafts_SkipsNonRequiresVexFacets()
    {
        // Arrange
        var facetDrifts = new[]
        {
            CreateFacetDrift("facet-ok", QuotaVerdict.Ok),
            CreateFacetDrift("facet-warn", QuotaVerdict.Warning),
            CreateFacetDrift("facet-block", QuotaVerdict.Blocked),
            CreateFacetDrift("facet-vex", QuotaVerdict.RequiresVex)
        };

        var report = new FacetDriftReport
        {
            ImageDigest = "sha256:abc123",
            BaselineSealId = "seal-123",
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [.. facetDrifts],
            OverallVerdict = QuotaVerdict.RequiresVex
        };
        var context = new FacetDriftVexEmissionContext(report);

        // Act
        var result = _emitter.EmitDrafts(context);

        // Assert
        Assert.Equal(1, result.DraftsEmitted);
        Assert.Equal("facet-vex", result.Drafts[0].FacetId);
    }

    [Fact]
    public void EmitDrafts_NullContext_ThrowsArgumentNullException()
    {
        // Act & Assert
        Assert.Throws<ArgumentNullException>(() => _emitter.EmitDrafts(null!));
    }

    #region Helper Methods

    private FacetDriftReport CreateDriftReport(
        QuotaVerdict verdict,
        string imageDigest = "sha256:default",
        string baselineSealId = "seal-default")
    {
        return new FacetDriftReport
        {
            ImageDigest = imageDigest,
            BaselineSealId = baselineSealId,
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [CreateFacetDrift("test-facet", verdict)],
            OverallVerdict = verdict
        };
    }

    private FacetDriftReport CreateDriftReportWithChurn(decimal churnPercent)
    {
        // churnPercent of the 100-file baseline below, e.g. 25m -> 25 added files
        var addedCount = (int)churnPercent;
        var addedFiles = Enumerable.Range(0, addedCount)
            .Select(i => new FacetFileEntry($"/added{i}.txt", $"sha256:added{i}", 100, null))
            .ToImmutableArray();

        var facetDrift = new FacetDrift
        {
            FacetId = "test-facet",
            Added = addedFiles,
            Removed = [],
            Modified = [],
            DriftScore = churnPercent,
            QuotaVerdict = QuotaVerdict.RequiresVex,
            BaselineFileCount = 100
        };

        return new FacetDriftReport
        {
            ImageDigest = "sha256:churn-test",
            BaselineSealId = "seal-churn",
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [facetDrift],
            OverallVerdict = QuotaVerdict.RequiresVex
        };
    }

    private FacetDriftReport CreateDriftReportWithChanges(int added, int removed, int modified)
    {
        var addedFiles = Enumerable.Range(0, added)
            .Select(i => new FacetFileEntry($"/added{i}.txt", $"sha256:added{i}", 100, null))
            .ToImmutableArray();

        var removedFiles = Enumerable.Range(0, removed)
            .Select(i => new FacetFileEntry($"/removed{i}.txt", $"sha256:removed{i}", 100, null))
            .ToImmutableArray();

        var modifiedFiles = Enumerable.Range(0, modified)
            .Select(i => new FacetFileModification(
                $"/modified{i}.txt",
                $"sha256:old{i}",
                $"sha256:new{i}",
                100,
                110))
            .ToImmutableArray();

        var facetDrift = new FacetDrift
        {
            FacetId = "test-facet",
            Added = addedFiles,
            Removed = removedFiles,
            Modified = modifiedFiles,
            DriftScore = added + removed + modified,
            QuotaVerdict = QuotaVerdict.RequiresVex,
            BaselineFileCount = 100
        };

        return new FacetDriftReport
        {
            ImageDigest = "sha256:changes-test",
            BaselineSealId = "seal-changes",
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [facetDrift],
            OverallVerdict = QuotaVerdict.RequiresVex
        };
    }

    private FacetDrift CreateFacetDrift(string facetId, QuotaVerdict verdict)
    {
        var addedCount = verdict == QuotaVerdict.RequiresVex ? 50 : 0;
        var addedFiles = Enumerable.Range(0, addedCount)
            .Select(i => new FacetFileEntry($"/added{i}.txt", $"sha256:added{i}", 100, null))
            .ToImmutableArray();

        return new FacetDrift
        {
            FacetId = facetId,
            Added = addedFiles,
            Removed = [],
            Modified = [],
            DriftScore = addedCount,
            QuotaVerdict = verdict,
            BaselineFileCount = 100
        };
    }

    #endregion
}
539  src/__Libraries/StellaOps.Facet.Tests/FacetMerkleTreeTests.cs  Normal file
@@ -0,0 +1,539 @@
// <copyright file="FacetMerkleTreeTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using FluentAssertions;
using Xunit;

namespace StellaOps.Facet.Tests;

/// <summary>
/// Tests for <see cref="FacetMerkleTree"/> - determinism and golden values.
/// Covers FCT-009 (determinism) and FCT-010 (golden tests).
/// </summary>
[Trait("Category", "Unit")]
public sealed class FacetMerkleTreeTests
{
    private readonly FacetMerkleTree _merkleTree;

    public FacetMerkleTreeTests()
    {
        _merkleTree = new FacetMerkleTree();
    }

    #region Helper Methods

    private static FacetFileEntry CreateFile(string path, string digest, long size = 1024)
    {
        return new FacetFileEntry(path, digest, size, DateTimeOffset.UtcNow);
    }

    private static FacetEntry CreateFacetEntry(string facetId, string merkleRoot)
    {
        // Ensure merkleRoot has proper 64-char hex format after sha256: prefix
        if (!merkleRoot.StartsWith("sha256:", StringComparison.Ordinal) ||
            merkleRoot.Length != 7 + 64)
        {
            // Pad short hashes for testing
            var hash = merkleRoot.StartsWith("sha256:", StringComparison.Ordinal)
                ? merkleRoot[7..]
                : merkleRoot;
            hash = hash.PadRight(64, '0');
            merkleRoot = $"sha256:{hash}";
        }

        return new FacetEntry
        {
            FacetId = facetId,
            Name = facetId,
            Category = FacetCategory.OsPackages,
            Selectors = ["/**"],
            MerkleRoot = merkleRoot,
            FileCount = 1,
            TotalBytes = 1024
        };
    }

    #endregion

    #region FCT-009: Determinism Tests

    [Fact]
    public void ComputeRoot_SameFiles_ProducesSameRoot()
    {
        // Arrange
        var files1 = new[]
        {
            CreateFile("/etc/nginx/nginx.conf", "sha256:aaa111", 512),
            CreateFile("/etc/hosts", "sha256:bbb222", 256),
            CreateFile("/usr/bin/nginx", "sha256:ccc333", 10240)
        };

        var files2 = new[]
        {
            CreateFile("/etc/nginx/nginx.conf", "sha256:aaa111", 512),
            CreateFile("/etc/hosts", "sha256:bbb222", 256),
            CreateFile("/usr/bin/nginx", "sha256:ccc333", 10240)
        };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().Be(root2);
    }

    [Fact]
    public void ComputeRoot_DifferentOrder_ProducesSameRoot()
    {
        // Arrange - files in different order should produce same root (sorted internally)
        var files1 = new[]
        {
            CreateFile("/etc/a.conf", "sha256:aaa", 100),
            CreateFile("/etc/b.conf", "sha256:bbb", 200),
            CreateFile("/etc/c.conf", "sha256:ccc", 300)
        };

        var files2 = new[]
        {
            CreateFile("/etc/c.conf", "sha256:ccc", 300),
            CreateFile("/etc/a.conf", "sha256:aaa", 100),
            CreateFile("/etc/b.conf", "sha256:bbb", 200)
        };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().Be(root2);
    }
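
    // Order-independence implies the tree canonicalizes its input; a sketch, assuming
    // leaves are sorted by path before hashing (consistent with the test above, though
    // the real implementation is not shown in this diff):
    //
    //   var leaves = files
    //       .OrderBy(f => f.Path, StringComparer.Ordinal)
    //       .Select(LeafHash) // LeafHash: see the sketch in the golden-test region below
    //       .ToList();
    //   // ...then pair and hash level by level until a single root remains.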

    [Fact]
    public void ComputeRoot_MultipleInvocations_Idempotent()
    {
        // Arrange
        var files = new[]
        {
            CreateFile("/file1", "sha256:hash1", 100),
            CreateFile("/file2", "sha256:hash2", 200)
        };

        // Act - compute multiple times
        var results = Enumerable.Range(0, 10)
            .Select(_ => _merkleTree.ComputeRoot(files))
            .ToList();

        // Assert - all results should be identical
        results.Should().AllBeEquivalentTo(results[0]);
    }

    [Fact]
    public void ComputeRoot_DifferentInstances_ProduceSameRoot()
    {
        // Arrange
        var tree1 = new FacetMerkleTree();
        var tree2 = new FacetMerkleTree();

        var files = new[]
        {
            CreateFile("/test/file.txt", "sha256:testdigest", 1024)
        };

        // Act
        var root1 = tree1.ComputeRoot(files);
        var root2 = tree2.ComputeRoot(files);

        // Assert
        root1.Should().Be(root2);
    }

    [Fact]
    public void ComputeCombinedRoot_SameFacets_ProducesSameRoot()
    {
        // Arrange - use proper 64-char hex values
        var facets1 = new[]
        {
            CreateFacetEntry("facet-a", "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
            CreateFacetEntry("facet-b", "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
        };

        var facets2 = new[]
        {
            CreateFacetEntry("facet-a", "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"),
            CreateFacetEntry("facet-b", "sha256:bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb")
        };

        // Act
        var combined1 = _merkleTree.ComputeCombinedRoot(facets1);
        var combined2 = _merkleTree.ComputeCombinedRoot(facets2);

        // Assert
        combined1.Should().Be(combined2);
    }

    [Fact]
    public void ComputeCombinedRoot_DifferentOrder_ProducesSameRoot()
    {
        // Arrange - facets in different order should produce same root
        var facets1 = new[]
        {
            CreateFacetEntry("alpha", "sha256:1111111111111111111111111111111111111111111111111111111111111111"),
            CreateFacetEntry("beta", "sha256:2222222222222222222222222222222222222222222222222222222222222222"),
            CreateFacetEntry("gamma", "sha256:3333333333333333333333333333333333333333333333333333333333333333")
        };

        var facets2 = new[]
        {
            CreateFacetEntry("gamma", "sha256:3333333333333333333333333333333333333333333333333333333333333333"),
            CreateFacetEntry("alpha", "sha256:1111111111111111111111111111111111111111111111111111111111111111"),
            CreateFacetEntry("beta", "sha256:2222222222222222222222222222222222222222222222222222222222222222")
        };

        // Act
        var combined1 = _merkleTree.ComputeCombinedRoot(facets1);
        var combined2 = _merkleTree.ComputeCombinedRoot(facets2);

        // Assert
        combined1.Should().Be(combined2);
    }

    #endregion

    #region FCT-010: Golden Tests - Known Inputs to Known Roots

    [Fact]
    public void ComputeRoot_EmptyFiles_ReturnsEmptyTreeRoot()
    {
        // Arrange
        var files = Array.Empty<FacetFileEntry>();

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().Be(FacetMerkleTree.EmptyTreeRoot);
        root.Should().Be("sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
    }
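
    // The golden value asserted above is a well-known constant: SHA-256 of the empty
    // byte string (e3b0c442...52b855), a conventional sentinel for an empty Merkle tree.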

    [Fact]
    public void ComputeCombinedRoot_EmptyFacets_ReturnsEmptyTreeRoot()
    {
        // Arrange
        var facets = Array.Empty<FacetEntry>();

        // Act
        var root = _merkleTree.ComputeCombinedRoot(facets);

        // Assert
        root.Should().Be(FacetMerkleTree.EmptyTreeRoot);
    }

    [Fact]
    public void ComputeRoot_SingleFile_ProducesKnownRoot()
    {
        // Arrange - canonical input: "/test|sha256:abc|1024"
        var files = new[] { CreateFile("/test", "sha256:abc", 1024) };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha256:");
        root.Length.Should().Be(7 + 64); // "sha256:" + 64 hex chars

        // Verify determinism by computing again
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }
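
    // The arrange comment above suggests a canonical leaf serialization of
    // "path|digest|size". Under that assumption (property names and exact format are
    // not confirmed by this diff), a leaf hash could look like:
    //
    //   byte[] LeafHash(FacetFileEntry file) =>
    //       System.Security.Cryptography.SHA256.HashData(
    //           System.Text.Encoding.UTF8.GetBytes($"{file.Path}|{file.Digest}|{file.SizeBytes}"));
    //
    // The timestamp would have to be excluded from the leaf, which is also what lets the
    // determinism tests pass even though CreateFile stamps each entry with DateTimeOffset.UtcNow.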

    [Fact]
    public void ComputeRoot_GoldenTestVector_TwoFiles()
    {
        // Arrange - known test vector
        var files = new[]
        {
            CreateFile("/a", "sha256:0000000000000000000000000000000000000000000000000000000000000001", 100),
            CreateFile("/b", "sha256:0000000000000000000000000000000000000000000000000000000000000002", 200)
        };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert - root should be stable (capture the actual value for golden test)
        root.Should().StartWith("sha256:");

        // Run twice to verify determinism
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);

        // Store this as golden value for future regression testing
        // This is the expected root for this specific input
        _goldenRoots["two_files_basic"] = root;
    }

    [Fact]
    public void ComputeRoot_GoldenTestVector_ThreeFiles()
    {
        // Arrange - three files tests odd-node tree handling
        var files = new[]
        {
            CreateFile("/alpha", "sha256:aaaa", 100),
            CreateFile("/beta", "sha256:bbbb", 200),
            CreateFile("/gamma", "sha256:cccc", 300)
        };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha256:");

        // Verify odd-node handling is deterministic
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }

    [Fact]
    public void ComputeRoot_GoldenTestVector_FourFiles()
    {
        // Arrange - four files tests balanced tree
        var files = new[]
        {
            CreateFile("/1", "sha256:1111", 1),
            CreateFile("/2", "sha256:2222", 2),
            CreateFile("/3", "sha256:3333", 3),
            CreateFile("/4", "sha256:4444", 4)
        };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert - balanced tree should produce consistent root
        root.Should().StartWith("sha256:");

        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }

    // Dictionary to store golden values for reference
    private readonly Dictionary<string, string> _goldenRoots = new();

    #endregion

    #region Sensitivity Tests - Different Inputs Must Produce Different Roots

    [Fact]
    public void ComputeRoot_DifferentContent_ProducesDifferentRoot()
    {
        // Arrange
        var files1 = new[] { CreateFile("/test", "sha256:aaa", 100) };
        var files2 = new[] { CreateFile("/test", "sha256:bbb", 100) };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().NotBe(root2);
    }

    [Fact]
    public void ComputeRoot_DifferentPath_ProducesDifferentRoot()
    {
        // Arrange
        var files1 = new[] { CreateFile("/path/a", "sha256:same", 100) };
        var files2 = new[] { CreateFile("/path/b", "sha256:same", 100) };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().NotBe(root2);
    }

    [Fact]
    public void ComputeRoot_DifferentSize_ProducesDifferentRoot()
    {
        // Arrange
        var files1 = new[] { CreateFile("/test", "sha256:same", 100) };
        var files2 = new[] { CreateFile("/test", "sha256:same", 200) };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().NotBe(root2);
    }

    [Fact]
    public void ComputeRoot_AdditionalFile_ProducesDifferentRoot()
    {
        // Arrange
        var files1 = new[]
        {
            CreateFile("/a", "sha256:aaa", 100)
        };

        var files2 = new[]
        {
            CreateFile("/a", "sha256:aaa", 100),
            CreateFile("/b", "sha256:bbb", 200)
        };

        // Act
        var root1 = _merkleTree.ComputeRoot(files1);
        var root2 = _merkleTree.ComputeRoot(files2);

        // Assert
        root1.Should().NotBe(root2);
    }

    [Fact]
    public void ComputeCombinedRoot_DifferentFacetRoots_ProducesDifferentCombined()
    {
        // Arrange - use proper 64-char hex values
        var facets1 = new[] { CreateFacetEntry("test", "sha256:0000000000000000000000000000000000000000000000000000000000000001") };
        var facets2 = new[] { CreateFacetEntry("test", "sha256:0000000000000000000000000000000000000000000000000000000000000002") };

        // Act
        var combined1 = _merkleTree.ComputeCombinedRoot(facets1);
        var combined2 = _merkleTree.ComputeCombinedRoot(facets2);

        // Assert
        combined1.Should().NotBe(combined2);
    }

    #endregion

    #region Proof Verification Tests

    [Fact]
    public void VerifyProof_ValidProof_ReturnsTrue()
    {
        // Arrange - create a simple tree and manually build proof
        var files = new[]
        {
            CreateFile("/a", "sha256:aaa", 100),
            CreateFile("/b", "sha256:bbb", 200)
        };

        var root = _merkleTree.ComputeRoot(files);
        var fileToVerify = files[0];

        // For a 2-node tree, proof is just the sibling's leaf hash
        // This is a simplified test - real proofs need proper construction
        // Here we just verify the API works
        var emptyProof = Array.Empty<byte[]>();

        // Act & Assert - with empty proof, only single-node trees verify
        // This tests the verification logic exists
        var singleFile = new[] { CreateFile("/single", "sha256:single", 100) };
        var singleRoot = _merkleTree.ComputeRoot(singleFile);
        _merkleTree.VerifyProof(singleFile[0], emptyProof, singleRoot).Should().BeTrue();
    }
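
    // A typical inclusion-proof verification walk, as a hedged sketch; the pairing and
    // ordering rules FacetMerkleTree actually uses are not visible in this diff:
    //
    //   bool Verify(byte[] leafHash, IEnumerable<(byte[] Sibling, bool SiblingIsLeft)> proof, byte[] root)
    //   {
    //       var current = leafHash;
    //       foreach (var (sibling, siblingIsLeft) in proof)
    //       {
    //           var pair = siblingIsLeft
    //               ? sibling.Concat(current).ToArray()
    //               : current.Concat(sibling).ToArray();
    //           current = System.Security.Cryptography.SHA256.HashData(pair);
    //       }
    //       return current.AsSpan().SequenceEqual(root);
    //   }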

    #endregion

    #region Format Tests

    [Fact]
    public void ComputeRoot_ReturnsCorrectFormat()
    {
        // Arrange
        var files = new[] { CreateFile("/test", "sha256:test", 100) };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().MatchRegex(@"^sha256:[a-f0-9]{64}$");
    }

    [Fact]
    public void ComputeRoot_WithDifferentAlgorithm_UsesCorrectPrefix()
    {
        // Arrange
        var sha512Tree = new FacetMerkleTree(algorithm: "SHA512");
        var files = new[] { CreateFile("/test", "sha512:test", 100) };

        // Act
        var root = sha512Tree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha512:");
        root.Length.Should().Be(7 + 128); // "sha512:" + 128 hex chars
    }

    #endregion

    #region Edge Cases

    [Fact]
    public void ComputeRoot_LargeNumberOfFiles_Succeeds()
    {
        // Arrange - 1000 files
        var files = Enumerable.Range(1, 1000)
            .Select(i => CreateFile($"/file{i:D4}", $"sha256:{i:D64}", i * 100))
            .ToArray();

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha256:");

        // Verify determinism
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }

    [Fact]
    public void ComputeRoot_SpecialCharactersInPath_HandledCorrectly()
    {
        // Arrange - paths with special characters
        var files = new[]
        {
            CreateFile("/path with spaces/file.txt", "sha256:aaa", 100),
            CreateFile("/path/file-with-dash.conf", "sha256:bbb", 200),
            CreateFile("/path/file_with_underscore.yml", "sha256:ccc", 300)
        };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha256:");

        // Verify determinism with special chars
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }

    [Fact]
    public void ComputeRoot_UnicodeInPath_HandledCorrectly()
    {
        // Arrange - Unicode paths (common in international deployments)
        var files = new[]
        {
            CreateFile("/etc/config-日本語.conf", "sha256:aaa", 100),
            CreateFile("/etc/config-中文.conf", "sha256:bbb", 200)
        };

        // Act
        var root = _merkleTree.ComputeRoot(files);

        // Assert
        root.Should().StartWith("sha256:");

        // Verify determinism with Unicode
        var root2 = _merkleTree.ComputeRoot(files);
        root.Should().Be(root2);
    }

    #endregion
}
389  src/__Libraries/StellaOps.Facet.Tests/GlobFacetExtractorTests.cs  Normal file
@@ -0,0 +1,389 @@
// <copyright file="GlobFacetExtractorTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using Xunit;

namespace StellaOps.Facet.Tests;

/// <summary>
/// Tests for <see cref="GlobFacetExtractor"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class GlobFacetExtractorTests : IDisposable
{
    private readonly FakeTimeProvider _timeProvider;
    private readonly GlobFacetExtractor _extractor;
    private readonly string _testDir;

    public GlobFacetExtractorTests()
    {
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2026, 1, 6, 12, 0, 0, TimeSpan.Zero));
        _extractor = new GlobFacetExtractor(_timeProvider);
        _testDir = Path.Combine(Path.GetTempPath(), $"facet-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_testDir);
    }

    public void Dispose()
    {
        if (Directory.Exists(_testDir))
        {
            Directory.Delete(_testDir, recursive: true);
        }
    }

    #region Helper Methods

    private void CreateFile(string relativePath, string content)
    {
        var fullPath = Path.Combine(_testDir, relativePath.TrimStart('/'));
        var dir = Path.GetDirectoryName(fullPath);
        if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
        }

        File.WriteAllText(fullPath, content, Encoding.UTF8);
    }

    private static IFacet CreateTestFacet(string id, params string[] selectors)
    {
        return new FacetDefinition(id, id, FacetCategory.Configuration, selectors, 10);
    }

    #endregion

    #region Basic Extraction Tests

    [Fact]
    public async Task ExtractFromDirectoryAsync_EmptyDirectory_ReturnsEmptyResult()
    {
        // Act
        var result = await _extractor.ExtractFromDirectoryAsync(_testDir, ct: TestContext.Current.CancellationToken);

        // Assert
        result.Should().NotBeNull();
        result.Facets.Should().BeEmpty();
        result.UnmatchedFiles.Should().BeEmpty();
        result.Stats.TotalFilesProcessed.Should().Be(0);
    }

    [Fact]
    public async Task ExtractFromDirectoryAsync_MatchesFileToCorrectFacet()
    {
        // Arrange
        CreateFile("/etc/nginx/nginx.conf", "server { listen 80; }");
        CreateFile("/etc/hosts", "127.0.0.1 localhost");
        CreateFile("/usr/bin/nginx", "binary content");

        var options = new FacetExtractionOptions
        {
            Facets = [
                CreateTestFacet("config-nginx", "/etc/nginx/**"),
                CreateTestFacet("binaries", "/usr/bin/*")
            ]
        };

        // Act
        var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);

        // Assert
        result.Facets.Should().HaveCount(2);

        var configFacet = result.Facets.First(f => f.FacetId == "config-nginx");
        configFacet.FileCount.Should().Be(1);
        configFacet.Files!.Value.Should().Contain(f => f.Path.EndsWith("nginx.conf"));

        var binaryFacet = result.Facets.First(f => f.FacetId == "binaries");
        binaryFacet.FileCount.Should().Be(1);
    }
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_UnmatchedFiles_ReportedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/random/file.txt", "random content");
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config-nginx", "/etc/nginx/**")],
|
||||
IncludeFileDetails = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Facets.Should().HaveCount(1);
|
||||
result.UnmatchedFiles.Should().HaveCount(1);
|
||||
result.UnmatchedFiles[0].Path.Should().Contain("random");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Hash Computation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_ComputesCorrectHashFormat()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/test.conf", "test content");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/**")],
|
||||
HashAlgorithm = "SHA256"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Facets.Should().HaveCount(1);
|
||||
var file = result.Facets[0].Files!.Value[0];
|
||||
file.Digest.Should().StartWith("sha256:");
|
||||
file.Digest.Length.Should().Be(7 + 64); // "sha256:" + 64 hex chars
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_SameContent_ProducesSameHash()
|
||||
{
|
||||
// Arrange
|
||||
const string content = "identical content";
|
||||
CreateFile("/etc/file1.conf", content);
|
||||
CreateFile("/etc/file2.conf", content);
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/**")]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var files = result.Facets[0].Files!.Value;
|
||||
files.Should().HaveCount(2);
|
||||
files[0].Digest.Should().Be(files[1].Digest);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merkle Tree Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_ComputesCombinedMerkleRoot()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
CreateFile("/usr/bin/nginx", "binary");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [
|
||||
CreateTestFacet("config", "/etc/**"),
|
||||
CreateTestFacet("binaries", "/usr/bin/*")
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.CombinedMerkleRoot.Should().NotBeNullOrEmpty();
|
||||
result.CombinedMerkleRoot.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_DeterministicMerkleRoot_ForSameFiles()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/a.conf", "content a");
|
||||
CreateFile("/etc/b.conf", "content b");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/**")]
|
||||
};
|
||||
|
||||
// Act - run twice
|
||||
var result1 = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
var result2 = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert - same root both times
|
||||
result1.CombinedMerkleRoot.Should().Be(result2.CombinedMerkleRoot);
|
||||
result1.Facets[0].MerkleRoot.Should().Be(result2.Facets[0].MerkleRoot);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Exclusion Pattern Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_ExcludesMatchingPatterns()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
CreateFile("/etc/nginx/test.conf.bak", "backup");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/**")],
|
||||
ExcludePatterns = ["**/*.bak"]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Facets[0].FileCount.Should().Be(1);
|
||||
result.SkippedFiles.Should().Contain(f => f.Path.EndsWith(".bak"));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Large File Handling Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_SkipsLargeFiles()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/small.conf", "small");
|
||||
var largePath = Path.Combine(_testDir, "etc", "large.bin");
|
||||
await using (var fs = File.Create(largePath))
|
||||
{
|
||||
fs.SetLength(200); // Small but set to test with lower threshold
|
||||
}
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/**")],
|
||||
MaxFileSizeBytes = 100
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Facets[0].FileCount.Should().Be(1);
|
||||
result.SkippedFiles.Should().Contain(f => f.Path.Contains("large.bin"));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Statistics Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_ReturnsCorrectStatistics()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
CreateFile("/etc/hosts", "127.0.0.1 localhost");
|
||||
CreateFile("/random/file.txt", "unmatched");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [CreateTestFacet("config", "/etc/nginx/**")],
|
||||
IncludeFileDetails = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Stats.TotalFilesProcessed.Should().Be(3);
|
||||
result.Stats.FilesMatched.Should().Be(1);
|
||||
result.Stats.FilesUnmatched.Should().Be(2);
|
||||
result.Stats.Duration.Should().BeGreaterThan(TimeSpan.Zero);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Built-in Facets Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_WithDefaultFacets_MatchesDpkgFiles()
|
||||
{
|
||||
// Arrange - simulate dpkg structure
|
||||
CreateFile("/var/lib/dpkg/status", "Package: nginx\nVersion: 1.0");
|
||||
CreateFile("/var/lib/dpkg/info/nginx.list", "/usr/bin/nginx");
|
||||
|
||||
// Act - use default (all built-in facets)
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, ct: TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var dpkgFacet = result.Facets.FirstOrDefault(f => f.FacetId == "os-packages-dpkg");
|
||||
dpkgFacet.Should().NotBeNull();
|
||||
dpkgFacet!.FileCount.Should().BeGreaterThanOrEqualTo(1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_WithDefaultFacets_MatchesNodeModules()
|
||||
{
|
||||
// Arrange - simulate node_modules
|
||||
CreateFile("/app/node_modules/express/package.json", "{\"name\":\"express\"}");
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, ct: TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
var npmFacet = result.Facets.FirstOrDefault(f => f.FacetId == "lang-deps-npm");
|
||||
npmFacet.Should().NotBeNull();
|
||||
npmFacet!.FileCount.Should().BeGreaterThanOrEqualTo(1);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Compact Mode Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_CompactMode_OmitsFileDetails()
|
||||
{
|
||||
// Arrange
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(
|
||||
_testDir,
|
||||
FacetExtractionOptions.Compact,
|
||||
TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert - file details should be null
|
||||
result.Facets.Should().NotBeEmpty();
|
||||
result.Facets[0].Files.Should().BeNull();
|
||||
result.UnmatchedFiles.Should().BeEmpty(); // Compact mode doesn't track unmatched
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Multi-Facet Matching Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ExtractFromDirectoryAsync_FileMatchingMultipleFacets_IncludedInBoth()
|
||||
{
|
||||
// Arrange - file matches both patterns
|
||||
CreateFile("/etc/nginx/nginx.conf", "server {}");
|
||||
|
||||
var options = new FacetExtractionOptions
|
||||
{
|
||||
Facets = [
|
||||
CreateTestFacet("all-etc", "/etc/**"),
|
||||
CreateTestFacet("nginx-specific", "/etc/nginx/**")
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _extractor.ExtractFromDirectoryAsync(_testDir, options, TestContext.Current.CancellationToken);
|
||||
|
||||
// Assert
|
||||
result.Facets.Should().HaveCount(2);
|
||||
result.Facets.All(f => f.FileCount == 1).Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit.v3" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Facet\StellaOps.Facet.csproj" />
  </ItemGroup>

</Project>
166
src/__Libraries/StellaOps.Facet/BuiltInFacets.cs
Normal file
@@ -0,0 +1,166 @@
// <copyright file="BuiltInFacets.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Built-in facet definitions for common image components.
/// </summary>
public static class BuiltInFacets
{
    /// <summary>
    /// Gets all built-in facet definitions.
    /// </summary>
    public static IReadOnlyList<IFacet> All { get; } = new IFacet[]
    {
        // OS Package Managers (priority 10)
        new FacetDefinition(
            "os-packages-dpkg",
            "Debian Packages",
            FacetCategory.OsPackages,
            ["/var/lib/dpkg/status", "/var/lib/dpkg/info/**"],
            priority: 10),
        new FacetDefinition(
            "os-packages-rpm",
            "RPM Packages",
            FacetCategory.OsPackages,
            ["/var/lib/rpm/**", "/usr/lib/sysimage/rpm/**"],
            priority: 10),
        new FacetDefinition(
            "os-packages-apk",
            "Alpine Packages",
            FacetCategory.OsPackages,
            ["/lib/apk/db/**"],
            priority: 10),
        new FacetDefinition(
            "os-packages-pacman",
            "Arch Packages",
            FacetCategory.OsPackages,
            ["/var/lib/pacman/**"],
            priority: 10),

        // Language Interpreters (priority 15 - before lang deps)
        new FacetDefinition(
            "interpreters-python",
            "Python Interpreters",
            FacetCategory.Interpreters,
            ["/usr/bin/python*", "/usr/local/bin/python*"],
            priority: 15),
        new FacetDefinition(
            "interpreters-node",
            "Node.js Interpreters",
            FacetCategory.Interpreters,
            ["/usr/bin/node*", "/usr/local/bin/node*"],
            priority: 15),
        new FacetDefinition(
            "interpreters-ruby",
            "Ruby Interpreters",
            FacetCategory.Interpreters,
            ["/usr/bin/ruby*", "/usr/local/bin/ruby*"],
            priority: 15),
        new FacetDefinition(
            "interpreters-perl",
            "Perl Interpreters",
            FacetCategory.Interpreters,
            ["/usr/bin/perl*", "/usr/local/bin/perl*"],
            priority: 15),

        // Language Dependencies (priority 20)
        new FacetDefinition(
            "lang-deps-npm",
            "NPM Packages",
            FacetCategory.LanguageDependencies,
            ["**/node_modules/**/package.json", "**/package-lock.json"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-pip",
            "Python Packages",
            FacetCategory.LanguageDependencies,
            ["**/site-packages/**/*.dist-info/METADATA", "**/requirements.txt"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-nuget",
            "NuGet Packages",
            FacetCategory.LanguageDependencies,
            ["**/*.deps.json", "**/.nuget/**"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-maven",
            "Maven Packages",
            FacetCategory.LanguageDependencies,
            ["**/.m2/repository/**/*.pom"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-cargo",
            "Cargo Packages",
            FacetCategory.LanguageDependencies,
            ["**/.cargo/registry/**", "**/Cargo.lock"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-go",
            "Go Modules",
            FacetCategory.LanguageDependencies,
            ["**/go.sum", "**/go/pkg/mod/**"],
            priority: 20),
        new FacetDefinition(
            "lang-deps-gem",
            "Ruby Gems",
            FacetCategory.LanguageDependencies,
            ["**/gems/**/*.gemspec", "**/Gemfile.lock"],
            priority: 20),

        // Certificates (priority 25)
        new FacetDefinition(
            "certs-system",
            "System Certificates",
            FacetCategory.Certificates,
            ["/etc/ssl/certs/**", "/etc/pki/**", "/usr/share/ca-certificates/**"],
            priority: 25),

        // Binaries (priority 30)
        new FacetDefinition(
            "binaries-usr",
            "System Binaries",
            FacetCategory.Binaries,
            ["/usr/bin/*", "/usr/sbin/*", "/bin/*", "/sbin/*"],
            priority: 30),
        new FacetDefinition(
            "binaries-lib",
            "Shared Libraries",
            FacetCategory.Binaries,
            ["/usr/lib/**/*.so*", "/lib/**/*.so*", "/usr/lib64/**/*.so*", "/lib64/**/*.so*"],
            priority: 30),

        // Configuration (priority 40)
        new FacetDefinition(
            "config-etc",
            "System Configuration",
            FacetCategory.Configuration,
            ["/etc/**/*.conf", "/etc/**/*.cfg", "/etc/**/*.yaml", "/etc/**/*.yml", "/etc/**/*.json"],
            priority: 40),
    };

    /// <summary>
    /// Gets a facet by its ID.
    /// </summary>
    /// <param name="facetId">The facet identifier.</param>
    /// <returns>The facet or null if not found.</returns>
    public static IFacet? GetById(string facetId)
        => All.FirstOrDefault(f => f.FacetId == facetId);

    /// <summary>
    /// Gets all facets in a category.
    /// </summary>
    /// <param name="category">The category to filter by.</param>
    /// <returns>Facets in the category.</returns>
    public static IEnumerable<IFacet> GetByCategory(FacetCategory category)
        => All.Where(f => f.Category == category);

    /// <summary>
    /// Gets facets sorted by priority (lowest first).
    /// </summary>
    /// <returns>Priority-sorted facets.</returns>
    public static IEnumerable<IFacet> GetByPriority()
        => All.OrderBy(f => f.Priority);
}
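Usage sketch (illustrative, not part of the commit): the catalog above is meant to be consumed through its lookup helpers. A minimal console example, assuming only the members shown in this file:

    using StellaOps.Facet;

    // Resolve one facet by ID; FacetDefinition.ToString() renders "id (Name)".
    var dpkg = BuiltInFacets.GetById("os-packages-dpkg");
    Console.WriteLine(dpkg); // "os-packages-dpkg (Debian Packages)"

    // Walk the catalog in classification order (lower priority value wins first).
    foreach (var facet in BuiltInFacets.GetByPriority())
    {
        Console.WriteLine($"{facet.Priority,3}  {facet.FacetId}  [{facet.Category}]");
    }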
53
src/__Libraries/StellaOps.Facet/DefaultCryptoHash.cs
Normal file
@@ -0,0 +1,53 @@
// <copyright file="DefaultCryptoHash.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Security.Cryptography;

namespace StellaOps.Facet;

/// <summary>
/// Default implementation of <see cref="ICryptoHash"/> using .NET built-in algorithms.
/// </summary>
public sealed class DefaultCryptoHash : ICryptoHash
{
    /// <summary>
    /// Gets the singleton instance.
    /// </summary>
    public static DefaultCryptoHash Instance { get; } = new();

    /// <inheritdoc/>
    public byte[] ComputeHash(byte[] data, string algorithm)
    {
        ArgumentNullException.ThrowIfNull(data);
        ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);

        return algorithm.ToUpperInvariant() switch
        {
            "SHA256" => SHA256.HashData(data),
            "SHA384" => SHA384.HashData(data),
            "SHA512" => SHA512.HashData(data),
            "SHA1" => SHA1.HashData(data),
            "MD5" => MD5.HashData(data),
            _ => throw new NotSupportedException($"Hash algorithm '{algorithm}' is not supported")
        };
    }

    /// <inheritdoc/>
    public async Task<byte[]> ComputeHashAsync(
        Stream stream,
        string algorithm,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(stream);
        ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);

        return algorithm.ToUpperInvariant() switch
        {
            "SHA256" => await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false),
            "SHA384" => await SHA384.HashDataAsync(stream, ct).ConfigureAwait(false),
            "SHA512" => await SHA512.HashDataAsync(stream, ct).ConfigureAwait(false),
            _ => throw new NotSupportedException($"Hash algorithm '{algorithm}' is not supported for async")
        };
    }
}
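Usage sketch (illustrative, not part of the commit): hashing a file stream with the singleton above. Note the hasher returns raw bytes; the "sha256:" prefix seen elsewhere in this commit is applied by callers:

    using StellaOps.Facet;

    await using var stream = File.OpenRead("/etc/hosts");
    var digest = await DefaultCryptoHash.Instance.ComputeHashAsync(stream, "SHA256");
    Console.WriteLine($"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}");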
46
src/__Libraries/StellaOps.Facet/FacetCategory.cs
Normal file
@@ -0,0 +1,46 @@
// <copyright file="FacetCategory.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Categories for grouping facets.
/// </summary>
public enum FacetCategory
{
    /// <summary>
    /// OS-level package managers (dpkg, rpm, apk, pacman).
    /// </summary>
    OsPackages,

    /// <summary>
    /// Language-specific dependencies (npm, pip, nuget, maven, cargo, go).
    /// </summary>
    LanguageDependencies,

    /// <summary>
    /// Executable binaries and shared libraries.
    /// </summary>
    Binaries,

    /// <summary>
    /// Configuration files (etc, conf, yaml, json).
    /// </summary>
    Configuration,

    /// <summary>
    /// SSL/TLS certificates and trust anchors.
    /// </summary>
    Certificates,

    /// <summary>
    /// Language interpreters (python, node, ruby, perl).
    /// </summary>
    Interpreters,

    /// <summary>
    /// User-defined custom facets.
    /// </summary>
    Custom
}
91
src/__Libraries/StellaOps.Facet/FacetClassifier.cs
Normal file
@@ -0,0 +1,91 @@
// <copyright file="FacetClassifier.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Classifies files into facets based on selectors.
/// </summary>
public sealed class FacetClassifier
{
    private readonly List<(IFacet Facet, GlobMatcher Matcher)> _facetMatchers;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetClassifier"/> class.
    /// </summary>
    /// <param name="facets">Facets to classify against (will be sorted by priority).</param>
    public FacetClassifier(IEnumerable<IFacet> facets)
    {
        ArgumentNullException.ThrowIfNull(facets);

        // Sort by priority (lowest first = highest priority)
        _facetMatchers = facets
            .OrderBy(f => f.Priority)
            .Select(f => (f, GlobMatcher.ForFacet(f)))
            .ToList();
    }

    /// <summary>
    /// Creates a classifier using built-in facets.
    /// </summary>
    public static FacetClassifier Default { get; } = new(BuiltInFacets.All);

    /// <summary>
    /// Classify a file path to a facet.
    /// </summary>
    /// <param name="path">The file path to classify.</param>
    /// <returns>The matching facet or null if no match.</returns>
    public IFacet? Classify(string path)
    {
        ArgumentNullException.ThrowIfNull(path);

        // First matching facet wins (ordered by priority)
        foreach (var (facet, matcher) in _facetMatchers)
        {
            if (matcher.IsMatch(path))
            {
                return facet;
            }
        }

        return null;
    }

    /// <summary>
    /// Classify a file and return the facet ID.
    /// </summary>
    /// <param name="path">The file path to classify.</param>
    /// <returns>The facet ID or null if no match.</returns>
    public string? ClassifyToId(string path)
        => Classify(path)?.FacetId;

    /// <summary>
    /// Classify multiple files efficiently.
    /// </summary>
    /// <param name="paths">The file paths to classify.</param>
    /// <returns>Dictionary from facet ID to matched paths.</returns>
    public Dictionary<string, List<string>> ClassifyMany(IEnumerable<string> paths)
    {
        ArgumentNullException.ThrowIfNull(paths);

        var result = new Dictionary<string, List<string>>();

        foreach (var path in paths)
        {
            var facet = Classify(path);
            if (facet is not null)
            {
                if (!result.TryGetValue(facet.FacetId, out var list))
                {
                    list = [];
                    result[facet.FacetId] = list;
                }

                list.Add(path);
            }
        }

        return result;
    }
}
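Usage sketch (illustrative, not part of the commit). The expected IDs in the comments follow from the built-in selectors and priorities, assuming GlobMatcher (not shown in this diff) implements conventional ** / * glob semantics:

    using StellaOps.Facet;

    var classifier = FacetClassifier.Default;

    classifier.ClassifyToId("/var/lib/dpkg/status");  // "os-packages-dpkg" (priority 10)
    classifier.ClassifyToId("/usr/bin/python3");      // "interpreters-python" - priority 15 beats binaries-usr at 30
    classifier.ClassifyToId("/home/user/notes.txt");  // null - no selector matches

    // Bulk classification groups paths by facet ID.
    var buckets = classifier.ClassifyMany(["/etc/ssl/certs/ca.pem", "/usr/lib/x86_64-linux-gnu/libc.so.6"]);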
55
src/__Libraries/StellaOps.Facet/FacetDefinition.cs
Normal file
@@ -0,0 +1,55 @@
// <copyright file="FacetDefinition.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Standard implementation of <see cref="IFacet"/> for defining facets.
/// </summary>
public sealed class FacetDefinition : IFacet
{
    /// <inheritdoc/>
    public string FacetId { get; }

    /// <inheritdoc/>
    public string Name { get; }

    /// <inheritdoc/>
    public FacetCategory Category { get; }

    /// <inheritdoc/>
    public IReadOnlyList<string> Selectors { get; }

    /// <inheritdoc/>
    public int Priority { get; }

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetDefinition"/> class.
    /// </summary>
    /// <param name="facetId">Unique identifier for the facet.</param>
    /// <param name="name">Human-readable name.</param>
    /// <param name="category">Facet category.</param>
    /// <param name="selectors">Glob patterns or paths for file matching.</param>
    /// <param name="priority">Priority for conflict resolution (lower = higher priority).</param>
    public FacetDefinition(
        string facetId,
        string name,
        FacetCategory category,
        string[] selectors,
        int priority)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(facetId);
        ArgumentException.ThrowIfNullOrWhiteSpace(name);
        ArgumentNullException.ThrowIfNull(selectors);

        FacetId = facetId;
        Name = name;
        Category = category;
        Selectors = selectors;
        Priority = priority;
    }

    /// <inheritdoc/>
    public override string ToString() => $"{FacetId} ({Name})";
}
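Sketch of a user-defined facet (illustrative, not part of the commit): FacetDefinition is the intended way to add custom selectors alongside the built-ins; the ID and selector below are hypothetical:

    using StellaOps.Facet;

    var appConfig = new FacetDefinition(
        facetId: "custom-app-config",          // hypothetical ID
        name: "Application Config",
        category: FacetCategory.Custom,
        selectors: ["/app/config/**/*.yaml"],
        priority: 5); // sorts ahead of every built-in, so it wins overlapping paths

    var classifier = new FacetClassifier([appConfig, .. BuiltInFacets.All]);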
132
src/__Libraries/StellaOps.Facet/FacetDrift.cs
Normal file
@@ -0,0 +1,132 @@
// <copyright file="FacetDrift.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Drift detection result for a single facet.
/// </summary>
public sealed record FacetDrift
{
    /// <summary>
    /// Gets the facet this drift applies to.
    /// </summary>
    public required string FacetId { get; init; }

    /// <summary>
    /// Gets the files added since baseline.
    /// </summary>
    public required ImmutableArray<FacetFileEntry> Added { get; init; }

    /// <summary>
    /// Gets the files removed since baseline.
    /// </summary>
    public required ImmutableArray<FacetFileEntry> Removed { get; init; }

    /// <summary>
    /// Gets the files modified since baseline.
    /// </summary>
    public required ImmutableArray<FacetFileModification> Modified { get; init; }

    /// <summary>
    /// Gets the drift score (0-100, higher = more drift).
    /// </summary>
    /// <remarks>
    /// The drift score weighs additions, removals, and modifications
    /// to produce a single measure of change magnitude.
    /// </remarks>
    public required decimal DriftScore { get; init; }

    /// <summary>
    /// Gets the quota evaluation result.
    /// </summary>
    public required QuotaVerdict QuotaVerdict { get; init; }

    /// <summary>
    /// Gets the number of files in the baseline facet seal.
    /// </summary>
    public required int BaselineFileCount { get; init; }

    /// <summary>
    /// Gets the total number of changes (added + removed + modified).
    /// </summary>
    public int TotalChanges => Added.Length + Removed.Length + Modified.Length;

    /// <summary>
    /// Gets the churn percentage = (changes / baseline count) * 100.
    /// </summary>
    public decimal ChurnPercent => BaselineFileCount > 0
        ? TotalChanges / (decimal)BaselineFileCount * 100
        : Added.Length > 0 ? 100m : 0m;

    /// <summary>
    /// Gets whether this facet has any drift.
    /// </summary>
    public bool HasDrift => TotalChanges > 0;

    /// <summary>
    /// Gets a no-drift instance for a facet.
    /// </summary>
    public static FacetDrift NoDrift(string facetId, int baselineFileCount) => new()
    {
        FacetId = facetId,
        Added = [],
        Removed = [],
        Modified = [],
        DriftScore = 0m,
        QuotaVerdict = QuotaVerdict.Ok,
        BaselineFileCount = baselineFileCount
    };
}

/// <summary>
/// Aggregated drift report for all facets in an image.
/// </summary>
public sealed record FacetDriftReport
{
    /// <summary>
    /// Gets the image digest analyzed.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Gets the baseline seal used for comparison.
    /// </summary>
    public required string BaselineSealId { get; init; }

    /// <summary>
    /// Gets when the analysis was performed.
    /// </summary>
    public required DateTimeOffset AnalyzedAt { get; init; }

    /// <summary>
    /// Gets the per-facet drift results.
    /// </summary>
    public required ImmutableArray<FacetDrift> FacetDrifts { get; init; }

    /// <summary>
    /// Gets the overall verdict (worst of all facets).
    /// </summary>
    public required QuotaVerdict OverallVerdict { get; init; }

    /// <summary>
    /// Gets the total files changed across all facets.
    /// </summary>
    public int TotalChangedFiles => FacetDrifts.Sum(d => d.TotalChanges);

    /// <summary>
    /// Gets the facets with any drift.
    /// </summary>
    public IEnumerable<FacetDrift> DriftedFacets => FacetDrifts.Where(d => d.HasDrift);

    /// <summary>
    /// Gets the facets with quota violations.
    /// </summary>
    public IEnumerable<FacetDrift> QuotaViolations =>
        FacetDrifts.Where(d => d.QuotaVerdict is QuotaVerdict.Warning
            or QuotaVerdict.Blocked
            or QuotaVerdict.RequiresVex);
}
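Worked example of the derived metrics (illustrative): a baseline of 200 files with 5 added, 3 removed, and 4 modified gives TotalChanges = 5 + 3 + 4 = 12 and ChurnPercent = 12 / 200 * 100 = 6.0%. The weighted DriftScore for the same change set, per ComputeDriftScore in FacetDriftDetector.cs below, is (5 + 3 + 4 * 0.5) / 200 * 100 = 5.0, since modifications count at half weight.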
353
src/__Libraries/StellaOps.Facet/FacetDriftDetector.cs
Normal file
@@ -0,0 +1,353 @@
// <copyright file="FacetDriftDetector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using DotNet.Globbing;

namespace StellaOps.Facet;

/// <summary>
/// Default implementation of <see cref="IFacetDriftDetector"/>.
/// </summary>
public sealed class FacetDriftDetector : IFacetDriftDetector
{
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetDriftDetector"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for timestamps.</param>
    public FacetDriftDetector(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc/>
    public Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetExtractionResult current,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(current);

        var drifts = new List<FacetDrift>();

        // Build lookup for current facets
        var currentFacetLookup = current.Facets.ToDictionary(f => f.FacetId);

        // Process each baseline facet
        foreach (var baselineFacet in baseline.Facets)
        {
            ct.ThrowIfCancellationRequested();

            if (currentFacetLookup.TryGetValue(baselineFacet.FacetId, out var currentFacet))
            {
                // Both have this facet - compute drift
                var drift = ComputeFacetDrift(
                    baselineFacet,
                    currentFacet,
                    baseline.GetQuota(baselineFacet.FacetId));

                drifts.Add(drift);
                currentFacetLookup.Remove(baselineFacet.FacetId);
            }
            else
            {
                // Facet was removed entirely - all files are "removed"
                var drift = CreateRemovedFacetDrift(baselineFacet, baseline.GetQuota(baselineFacet.FacetId));
                drifts.Add(drift);
            }
        }

        // Remaining current facets are new
        foreach (var newFacet in currentFacetLookup.Values)
        {
            var drift = CreateNewFacetDrift(newFacet);
            drifts.Add(drift);
        }

        var overallVerdict = ComputeOverallVerdict(drifts);

        var report = new FacetDriftReport
        {
            ImageDigest = baseline.ImageDigest,
            BaselineSealId = baseline.CombinedMerkleRoot,
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [.. drifts],
            OverallVerdict = overallVerdict
        };

        return Task.FromResult(report);
    }

    /// <inheritdoc/>
    public Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetSeal current,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(baseline);
        ArgumentNullException.ThrowIfNull(current);

        var drifts = new List<FacetDrift>();

        // Build lookup for current facets
        var currentFacetLookup = current.Facets.ToDictionary(f => f.FacetId);

        // Process each baseline facet
        foreach (var baselineFacet in baseline.Facets)
        {
            ct.ThrowIfCancellationRequested();

            if (currentFacetLookup.TryGetValue(baselineFacet.FacetId, out var currentFacet))
            {
                // Both have this facet - compute drift
                var drift = ComputeFacetDrift(
                    baselineFacet,
                    currentFacet,
                    baseline.GetQuota(baselineFacet.FacetId));

                drifts.Add(drift);
                currentFacetLookup.Remove(baselineFacet.FacetId);
            }
            else
            {
                // Facet was removed entirely
                var drift = CreateRemovedFacetDrift(baselineFacet, baseline.GetQuota(baselineFacet.FacetId));
                drifts.Add(drift);
            }
        }

        // Remaining current facets are new
        foreach (var newFacet in currentFacetLookup.Values)
        {
            var drift = CreateNewFacetDrift(newFacet);
            drifts.Add(drift);
        }

        var overallVerdict = ComputeOverallVerdict(drifts);

        var report = new FacetDriftReport
        {
            ImageDigest = current.ImageDigest,
            BaselineSealId = baseline.CombinedMerkleRoot,
            AnalyzedAt = _timeProvider.GetUtcNow(),
            FacetDrifts = [.. drifts],
            OverallVerdict = overallVerdict
        };

        return Task.FromResult(report);
    }

    private static FacetDrift ComputeFacetDrift(
        FacetEntry baseline,
        FacetEntry current,
        FacetQuota quota)
    {
        // Quick check: if Merkle roots match, no drift
        if (baseline.MerkleRoot == current.MerkleRoot)
        {
            return FacetDrift.NoDrift(baseline.FacetId, baseline.FileCount);
        }

        // Need file-level comparison
        if (baseline.Files is null || current.Files is null)
        {
            // Can't compute detailed drift without file entries
            // Fall back to root-level drift indication
            return new FacetDrift
            {
                FacetId = baseline.FacetId,
                Added = [],
                Removed = [],
                Modified = [],
                DriftScore = 100m, // Max drift since we can't compute details
                QuotaVerdict = quota.Action switch
                {
                    QuotaExceededAction.Block => QuotaVerdict.Blocked,
                    QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
                    _ => QuotaVerdict.Warning
                },
                BaselineFileCount = baseline.FileCount
            };
        }

        // Build allowlist globs
        var allowlistGlobs = quota.AllowlistGlobs
            .Select(p => Glob.Parse(p))
            .ToList();

        bool IsAllowlisted(string path) => allowlistGlobs.Any(g => g.IsMatch(path));

        // Build file dictionaries
        var baselineFiles = baseline.Files.Value.ToDictionary(f => f.Path);
        var currentFiles = current.Files.Value.ToDictionary(f => f.Path);

        var added = new List<FacetFileEntry>();
        var removed = new List<FacetFileEntry>();
        var modified = new List<FacetFileModification>();

        // Find additions and modifications
        foreach (var (path, currentFile) in currentFiles)
        {
            if (IsAllowlisted(path))
            {
                continue;
            }

            if (baselineFiles.TryGetValue(path, out var baselineFile))
            {
                // File exists in both - check for modification
                if (baselineFile.Digest != currentFile.Digest)
                {
                    modified.Add(new FacetFileModification(
                        path,
                        baselineFile.Digest,
                        currentFile.Digest,
                        baselineFile.SizeBytes,
                        currentFile.SizeBytes));
                }
            }
            else
            {
                // File is new
                added.Add(currentFile);
            }
        }

        // Find removals
        foreach (var (path, baselineFile) in baselineFiles)
        {
            if (IsAllowlisted(path))
            {
                continue;
            }

            if (!currentFiles.ContainsKey(path))
            {
                removed.Add(baselineFile);
            }
        }

        var totalChanges = added.Count + removed.Count + modified.Count;
        var driftScore = ComputeDriftScore(
            added.Count,
            removed.Count,
            modified.Count,
            baseline.FileCount);

        var churnPercent = baseline.FileCount > 0
            ? totalChanges / (decimal)baseline.FileCount * 100
            : added.Count > 0 ? 100m : 0m;

        var verdict = EvaluateQuota(quota, churnPercent, totalChanges);

        return new FacetDrift
        {
            FacetId = baseline.FacetId,
            Added = [.. added],
            Removed = [.. removed],
            Modified = [.. modified],
            DriftScore = driftScore,
            QuotaVerdict = verdict,
            BaselineFileCount = baseline.FileCount
        };
    }

    private static FacetDrift CreateRemovedFacetDrift(FacetEntry baseline, FacetQuota quota)
    {
        var removedFiles = baseline.Files?.ToImmutableArray() ?? [];
        var verdict = quota.Action switch
        {
            QuotaExceededAction.Block => QuotaVerdict.Blocked,
            QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
            _ => QuotaVerdict.Warning
        };

        return new FacetDrift
        {
            FacetId = baseline.FacetId,
            Added = [],
            Removed = removedFiles,
            Modified = [],
            DriftScore = 100m,
            QuotaVerdict = verdict,
            BaselineFileCount = baseline.FileCount
        };
    }

    private static FacetDrift CreateNewFacetDrift(FacetEntry newFacet)
    {
        var addedFiles = newFacet.Files?.ToImmutableArray() ?? [];

        return new FacetDrift
        {
            FacetId = newFacet.FacetId,
            Added = addedFiles,
            Removed = [],
            Modified = [],
            DriftScore = 100m, // All new = max drift from baseline perspective
            QuotaVerdict = QuotaVerdict.Warning, // New facets get warning by default
            BaselineFileCount = 0
        };
    }

    private static decimal ComputeDriftScore(
        int added,
        int removed,
        int modified,
        int baselineCount)
    {
        if (baselineCount == 0)
        {
            return added > 0 ? 100m : 0m;
        }

        // Weighted score: additions=1.0, removals=1.0, modifications=0.5
        var weightedChanges = added + removed + (modified * 0.5m);
        var score = weightedChanges / baselineCount * 100;

        return Math.Min(100m, score);
    }

    private static QuotaVerdict EvaluateQuota(FacetQuota quota, decimal churnPercent, int totalChanges)
    {
        var exceeds = churnPercent > quota.MaxChurnPercent ||
                      totalChanges > quota.MaxChangedFiles;

        if (!exceeds)
        {
            return QuotaVerdict.Ok;
        }

        return quota.Action switch
        {
            QuotaExceededAction.Block => QuotaVerdict.Blocked,
            QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
            _ => QuotaVerdict.Warning
        };
    }

    private static QuotaVerdict ComputeOverallVerdict(List<FacetDrift> drifts)
    {
        // Return worst verdict
        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.Blocked))
        {
            return QuotaVerdict.Blocked;
        }

        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.RequiresVex))
        {
            return QuotaVerdict.RequiresVex;
        }

        if (drifts.Any(d => d.QuotaVerdict == QuotaVerdict.Warning))
        {
            return QuotaVerdict.Warning;
        }

        return QuotaVerdict.Ok;
    }
}
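End-to-end sketch (illustrative, not part of the commit): extract the current filesystem state, diff it against a stored baseline seal, and act on the verdict. LoadBaselineSealAsync is a hypothetical placeholder for however a caller persists seals:

    using StellaOps.Facet;

    FacetSeal baseline = await LoadBaselineSealAsync("sha256:baseline123"); // hypothetical helper
    var extractor = new GlobFacetExtractor(TimeProvider.System);
    var current = await extractor.ExtractFromDirectoryAsync("/mnt/image-rootfs");

    var detector = new FacetDriftDetector();
    var report = await detector.DetectDriftAsync(baseline, current);

    foreach (var drift in report.QuotaViolations)
    {
        Console.WriteLine($"{drift.FacetId}: {drift.TotalChanges} changes, {drift.ChurnPercent:F1}% churn -> {drift.QuotaVerdict}");
    }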
349
src/__Libraries/StellaOps.Facet/FacetDriftVexEmitter.cs
Normal file
@@ -0,0 +1,349 @@
// <copyright file="FacetDriftVexEmitter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-016)

using System.Collections.Immutable;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;

namespace StellaOps.Facet;

/// <summary>
/// Emits VEX drafts for facet drift that requires authorization.
/// When drift exceeds quota and action is RequireVex, this emitter
/// generates a draft VEX document for human review.
/// </summary>
public sealed class FacetDriftVexEmitter
{
    private readonly FacetDriftVexEmitterOptions _options;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetDriftVexEmitter"/> class.
    /// </summary>
    public FacetDriftVexEmitter(
        FacetDriftVexEmitterOptions? options = null,
        TimeProvider? timeProvider = null)
    {
        _options = options ?? FacetDriftVexEmitterOptions.Default;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Evaluates facet drift and emits VEX drafts for facets that exceed quotas.
    /// </summary>
    public FacetDriftVexEmissionResult EmitDrafts(FacetDriftVexEmissionContext context)
    {
        ArgumentNullException.ThrowIfNull(context);

        var drafts = new List<FacetDriftVexDraft>();

        foreach (var facetDrift in context.DriftReport.FacetDrifts)
        {
            // Only emit drafts for facets that require VEX authorization
            if (facetDrift.QuotaVerdict != QuotaVerdict.RequiresVex)
            {
                continue;
            }

            var draft = CreateVexDraft(facetDrift, context);
            drafts.Add(draft);

            if (drafts.Count >= _options.MaxDraftsPerBatch)
            {
                break;
            }
        }

        return new FacetDriftVexEmissionResult(
            ImageDigest: context.DriftReport.ImageDigest,
            BaselineSealId: context.DriftReport.BaselineSealId,
            DraftsEmitted: drafts.Count,
            Drafts: [.. drafts],
            GeneratedAt: _timeProvider.GetUtcNow());
    }

    /// <summary>
    /// Creates a VEX draft for a single facet that exceeded its quota.
    /// </summary>
    private FacetDriftVexDraft CreateVexDraft(
        FacetDrift drift,
        FacetDriftVexEmissionContext context)
    {
        var draftId = GenerateDraftId(drift, context);
        var now = _timeProvider.GetUtcNow();

        // Build evidence links
        var evidenceLinks = new List<FacetDriftEvidenceLink>
        {
            new(
                Type: "facet_drift_analysis",
                Uri: $"facet://{context.DriftReport.ImageDigest}/{drift.FacetId}",
                Description: $"Facet drift analysis for {drift.FacetId}"),
            new(
                Type: "baseline_seal",
                Uri: $"seal://{context.DriftReport.BaselineSealId}",
                Description: "Baseline seal used for comparison")
        };

        // Add links for significant changes
        if (drift.Added.Length > 0)
        {
            evidenceLinks.Add(new FacetDriftEvidenceLink(
                Type: "added_files",
                Uri: $"facet://{context.DriftReport.ImageDigest}/{drift.FacetId}/added",
                Description: $"{drift.Added.Length} files added"));
        }

        if (drift.Removed.Length > 0)
        {
            evidenceLinks.Add(new FacetDriftEvidenceLink(
                Type: "removed_files",
                Uri: $"facet://{context.DriftReport.ImageDigest}/{drift.FacetId}/removed",
                Description: $"{drift.Removed.Length} files removed"));
        }

        if (drift.Modified.Length > 0)
        {
            evidenceLinks.Add(new FacetDriftEvidenceLink(
                Type: "modified_files",
                Uri: $"facet://{context.DriftReport.ImageDigest}/{drift.FacetId}/modified",
                Description: $"{drift.Modified.Length} files modified"));
        }

        return new FacetDriftVexDraft(
            DraftId: draftId,
            FacetId: drift.FacetId,
            ImageDigest: context.DriftReport.ImageDigest,
            BaselineSealId: context.DriftReport.BaselineSealId,
            SuggestedStatus: FacetDriftVexStatus.Accepted,
            Justification: FacetDriftVexJustification.IntentionalChange,
            Rationale: GenerateRationale(drift, context),
            DriftSummary: CreateDriftSummary(drift),
            EvidenceLinks: [.. evidenceLinks],
            GeneratedAt: now,
            ExpiresAt: now.Add(_options.DraftTtl),
            ReviewDeadline: now.AddDays(_options.ReviewSlaDays),
            RequiresReview: true,
            ReviewerNotes: GenerateReviewerNotes(drift));
    }

    /// <summary>
    /// Generates a human-readable rationale for the VEX draft.
    /// </summary>
    private string GenerateRationale(FacetDrift drift, FacetDriftVexEmissionContext context)
    {
        var sb = new StringBuilder();
        sb.Append(CultureInfo.InvariantCulture, $"Facet '{drift.FacetId}' drift exceeds configured quota. ");
        sb.Append(CultureInfo.InvariantCulture, $"Churn: {drift.ChurnPercent:F1}% ({drift.TotalChanges} of {drift.BaselineFileCount} files changed). ");

        if (drift.Added.Length > 0)
        {
            sb.Append($"{drift.Added.Length} file(s) added. ");
        }

        if (drift.Removed.Length > 0)
        {
            sb.Append($"{drift.Removed.Length} file(s) removed. ");
        }

        if (drift.Modified.Length > 0)
        {
            sb.Append($"{drift.Modified.Length} file(s) modified. ");
        }

        sb.Append("VEX authorization required to proceed with deployment.");

        return sb.ToString();
    }

    /// <summary>
    /// Creates a summary of the drift for the VEX draft.
    /// </summary>
    private static FacetDriftSummary CreateDriftSummary(FacetDrift drift)
    {
        return new FacetDriftSummary(
            TotalChanges: drift.TotalChanges,
            AddedCount: drift.Added.Length,
            RemovedCount: drift.Removed.Length,
            ModifiedCount: drift.Modified.Length,
            ChurnPercent: drift.ChurnPercent,
            DriftScore: drift.DriftScore,
            BaselineFileCount: drift.BaselineFileCount);
    }

    /// <summary>
    /// Generates notes for the reviewer.
    /// </summary>
    private string GenerateReviewerNotes(FacetDrift drift)
    {
        var sb = new StringBuilder();
        sb.AppendLine("## Review Checklist");
        sb.AppendLine();
        sb.AppendLine("- [ ] Verify the drift is intentional and authorized");
        sb.AppendLine("- [ ] Confirm no security-sensitive files were unexpectedly modified");
        sb.AppendLine("- [ ] Check if the changes align with the current release scope");

        if (drift.ChurnPercent > _options.HighChurnThreshold)
        {
            sb.AppendLine();
            sb.AppendLine(CultureInfo.InvariantCulture, $"**WARNING**: High churn detected ({drift.ChurnPercent:F1}%). Consider additional scrutiny.");
        }

        if (drift.Removed.Length > 0)
        {
            sb.AppendLine();
            sb.AppendLine("**NOTE**: Files were removed. Verify these removals are intentional.");
        }

        return sb.ToString();
    }

    /// <summary>
    /// Generates a deterministic draft ID.
    /// </summary>
    private string GenerateDraftId(FacetDrift drift, FacetDriftVexEmissionContext context)
    {
        var input = $"{context.DriftReport.ImageDigest}:{drift.FacetId}:{context.DriftReport.BaselineSealId}:{context.DriftReport.AnalyzedAt.Ticks}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"vexfd-{Convert.ToHexString(hash).ToLowerInvariant()[..16]}";
    }
}

/// <summary>
/// Options for facet drift VEX emission. Properties are settable so that the
/// <c>Action&lt;FacetDriftVexEmitterOptions&gt;</c> configuration callback in the
/// service registration can apply them.
/// </summary>
public sealed record FacetDriftVexEmitterOptions
{
    /// <summary>
    /// Maximum drafts to emit per batch.
    /// </summary>
    public int MaxDraftsPerBatch { get; set; } = 50;

    /// <summary>
    /// Time-to-live for drafts before they expire.
    /// </summary>
    public TimeSpan DraftTtl { get; set; } = TimeSpan.FromDays(30);

    /// <summary>
    /// SLA in days for human review.
    /// </summary>
    public int ReviewSlaDays { get; set; } = 7;

    /// <summary>
    /// Churn percentage that triggers the high-churn warning.
    /// </summary>
    public decimal HighChurnThreshold { get; set; } = 30m;

    /// <summary>
    /// Default options.
    /// </summary>
    public static FacetDriftVexEmitterOptions Default { get; } = new();
}

/// <summary>
/// Context for facet drift VEX emission.
/// </summary>
public sealed record FacetDriftVexEmissionContext(
    FacetDriftReport DriftReport,
    string? TenantId = null,
    string? RequestedBy = null);

/// <summary>
/// Result of facet drift VEX emission.
/// </summary>
public sealed record FacetDriftVexEmissionResult(
    string ImageDigest,
    string BaselineSealId,
    int DraftsEmitted,
    ImmutableArray<FacetDriftVexDraft> Drafts,
    DateTimeOffset GeneratedAt);

/// <summary>
/// A VEX draft generated from facet drift analysis.
/// </summary>
public sealed record FacetDriftVexDraft(
    string DraftId,
    string FacetId,
    string ImageDigest,
    string BaselineSealId,
    FacetDriftVexStatus SuggestedStatus,
    FacetDriftVexJustification Justification,
    string Rationale,
    FacetDriftSummary DriftSummary,
    ImmutableArray<FacetDriftEvidenceLink> EvidenceLinks,
    DateTimeOffset GeneratedAt,
    DateTimeOffset ExpiresAt,
    DateTimeOffset ReviewDeadline,
    bool RequiresReview,
    string? ReviewerNotes = null);

/// <summary>
/// Summary of drift for a VEX draft.
/// </summary>
public sealed record FacetDriftSummary(
    int TotalChanges,
    int AddedCount,
    int RemovedCount,
    int ModifiedCount,
    decimal ChurnPercent,
    decimal DriftScore,
    int BaselineFileCount);

/// <summary>
/// VEX status for facet drift drafts.
/// </summary>
public enum FacetDriftVexStatus
{
    /// <summary>
    /// Drift is accepted and authorized.
    /// </summary>
    Accepted,

    /// <summary>
    /// Drift is rejected - requires remediation.
    /// </summary>
    Rejected,

    /// <summary>
    /// Under investigation - awaiting review.
    /// </summary>
    UnderReview
}

/// <summary>
/// VEX justification for facet drift drafts.
/// </summary>
public enum FacetDriftVexJustification
{
    /// <summary>
    /// Drift is an intentional change (upgrade, refactor, etc.).
    /// </summary>
    IntentionalChange,

    /// <summary>
    /// Security fix applied.
    /// </summary>
    SecurityFix,

    /// <summary>
    /// Dependency update.
    /// </summary>
    DependencyUpdate,

    /// <summary>
    /// Configuration change.
    /// </summary>
    ConfigurationChange,

    /// <summary>
    /// Other reason (requires explanation).
    /// </summary>
    Other
}

/// <summary>
/// Evidence link for facet drift VEX drafts.
/// </summary>
public sealed record FacetDriftEvidenceLink(
    string Type,
    string Uri,
    string? Description = null);
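Usage sketch (illustrative, not part of the commit): `report` stands for a FacetDriftReport such as FacetDriftDetector produces above:

    using StellaOps.Facet;

    var emitter = new FacetDriftVexEmitter(new FacetDriftVexEmitterOptions
    {
        ReviewSlaDays = 3,
        HighChurnThreshold = 20m
    });

    var result = emitter.EmitDrafts(new FacetDriftVexEmissionContext(report));
    foreach (var draft in result.Drafts)
    {
        Console.WriteLine($"{draft.DraftId}: {draft.FacetId}, review by {draft.ReviewDeadline:u}");
    }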
@@ -0,0 +1,73 @@
// <copyright file="FacetDriftVexServiceCollectionExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-019)

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;

namespace StellaOps.Facet;

/// <summary>
/// Extension methods for registering facet drift VEX services.
/// </summary>
public static class FacetDriftVexServiceCollectionExtensions
{
    /// <summary>
    /// Adds facet drift VEX emitter and workflow services.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Optional options configuration.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddFacetDriftVexServices(
        this IServiceCollection services,
        Action<FacetDriftVexEmitterOptions>? configureOptions = null)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Register options; configure a fresh instance so the shared Default is never mutated
        var options = new FacetDriftVexEmitterOptions();
        if (configureOptions is not null)
        {
            configureOptions(options);
        }

        services.TryAddSingleton(options);

        // Register emitter
        services.TryAddSingleton<FacetDriftVexEmitter>();

        // Register workflow
        services.TryAddScoped<FacetDriftVexWorkflow>();

        return services;
    }

    /// <summary>
    /// Adds the in-memory draft store for testing.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddInMemoryFacetDriftVexDraftStore(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        services.TryAddSingleton<IFacetDriftVexDraftStore, InMemoryFacetDriftVexDraftStore>();
        return services;
    }

    /// <summary>
    /// Adds facet drift VEX services with in-memory store (for testing).
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configureOptions">Optional options configuration.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddFacetDriftVexServicesWithInMemoryStore(
        this IServiceCollection services,
        Action<FacetDriftVexEmitterOptions>? configureOptions = null)
    {
        return services
            .AddFacetDriftVexServices(configureOptions)
            .AddInMemoryFacetDriftVexDraftStore();
    }
}
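Wiring sketch (illustrative, not part of the commit), using the in-memory store overload intended for tests; assumes the default service provider honors the workflow's optional constructor parameters:

    using Microsoft.Extensions.DependencyInjection;
    using StellaOps.Facet;

    var services = new ServiceCollection();
    services.AddFacetDriftVexServicesWithInMemoryStore(o => o.ReviewSlaDays = 3);

    await using var provider = services.BuildServiceProvider();
    var workflow = provider.GetRequiredService<FacetDriftVexWorkflow>();
    // workflow.ExecuteAsync(report) emits and stores drafts, skipping duplicates by default.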
266
src/__Libraries/StellaOps.Facet/FacetDriftVexWorkflow.cs
Normal file
266
src/__Libraries/StellaOps.Facet/FacetDriftVexWorkflow.cs
Normal file
@@ -0,0 +1,266 @@
// <copyright file="FacetDriftVexWorkflow.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-019)

using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;

namespace StellaOps.Facet;

/// <summary>
/// Result of a facet drift VEX workflow execution.
/// </summary>
public sealed record FacetDriftVexWorkflowResult
{
    /// <summary>
    /// Emission result from the emitter.
    /// </summary>
    public required FacetDriftVexEmissionResult EmissionResult { get; init; }

    /// <summary>
    /// Number of drafts that were newly created.
    /// </summary>
    public int NewDraftsCreated { get; init; }

    /// <summary>
    /// Number of drafts that already existed (skipped).
    /// </summary>
    public int ExistingDraftsSkipped { get; init; }

    /// <summary>
    /// IDs of newly created drafts.
    /// </summary>
    public ImmutableArray<string> CreatedDraftIds { get; init; } = [];

    /// <summary>
    /// Any errors that occurred during storage.
    /// </summary>
    public ImmutableArray<string> Errors { get; init; } = [];

    /// <summary>
    /// Whether all operations completed successfully.
    /// </summary>
    public bool Success => Errors.Length == 0;
}

/// <summary>
/// Orchestrates the facet drift VEX workflow: emit drafts + store.
/// This integrates with the Excititor VEX workflow by providing
/// drafts that can be picked up for human review.
/// </summary>
public sealed class FacetDriftVexWorkflow
{
    private readonly FacetDriftVexEmitter _emitter;
    private readonly IFacetDriftVexDraftStore _draftStore;
    private readonly ILogger<FacetDriftVexWorkflow> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetDriftVexWorkflow"/> class.
    /// </summary>
    public FacetDriftVexWorkflow(
        FacetDriftVexEmitter emitter,
        IFacetDriftVexDraftStore draftStore,
        ILogger<FacetDriftVexWorkflow>? logger = null)
    {
        _emitter = emitter ?? throw new ArgumentNullException(nameof(emitter));
        _draftStore = draftStore ?? throw new ArgumentNullException(nameof(draftStore));
        _logger = logger ?? NullLogger<FacetDriftVexWorkflow>.Instance;
    }

    /// <summary>
    /// Executes the full workflow: emit drafts from drift report and store them.
    /// </summary>
    /// <param name="driftReport">The drift report to process.</param>
    /// <param name="skipExisting">If true, skip creating drafts that already exist.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Workflow result with draft IDs and status.</returns>
    public async Task<FacetDriftVexWorkflowResult> ExecuteAsync(
        FacetDriftReport driftReport,
        bool skipExisting = true,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(driftReport);

        // Emit drafts from drift report
        var context = new FacetDriftVexEmissionContext(driftReport);
        var emissionResult = _emitter.EmitDrafts(context);

        if (emissionResult.DraftsEmitted == 0)
        {
            _logger.LogDebug("No drafts to emit for image {ImageDigest}", driftReport.ImageDigest);
            return new FacetDriftVexWorkflowResult
            {
                EmissionResult = emissionResult,
                NewDraftsCreated = 0,
                ExistingDraftsSkipped = 0
            };
        }

        // Store drafts
        var createdIds = new List<string>();
        var skippedCount = 0;
        var errors = new List<string>();

        foreach (var draft in emissionResult.Drafts)
        {
            ct.ThrowIfCancellationRequested();

            try
            {
                if (skipExisting)
                {
                    var exists = await _draftStore.ExistsAsync(
                        draft.ImageDigest,
                        draft.FacetId,
                        ct).ConfigureAwait(false);

                    if (exists)
                    {
                        _logger.LogDebug(
                            "Skipping existing draft for {ImageDigest}/{FacetId}",
                            draft.ImageDigest,
                            draft.FacetId);
                        skippedCount++;
                        continue;
                    }
                }

                await _draftStore.SaveAsync(draft, ct).ConfigureAwait(false);
                createdIds.Add(draft.DraftId);

                _logger.LogInformation(
                    "Created VEX draft {DraftId} for {ImageDigest}/{FacetId} with churn {ChurnPercent:F1}%",
                    draft.DraftId,
                    draft.ImageDigest,
                    draft.FacetId,
                    draft.DriftSummary.ChurnPercent);
            }
            catch (Exception ex) when (ex is not OperationCanceledException)
            {
                _logger.LogError(
                    ex,
                    "Failed to store draft for {ImageDigest}/{FacetId}",
                    draft.ImageDigest,
                    draft.FacetId);
                errors.Add($"Failed to store draft for {draft.FacetId}: {ex.Message}");
            }
        }

        return new FacetDriftVexWorkflowResult
        {
            EmissionResult = emissionResult,
            NewDraftsCreated = createdIds.Count,
            ExistingDraftsSkipped = skippedCount,
            CreatedDraftIds = [.. createdIds],
            Errors = [.. errors]
        };
    }

    /// <summary>
    /// Approves a draft and converts it to a VEX statement.
    /// </summary>
    /// <param name="draftId">ID of the draft to approve.</param>
    /// <param name="reviewedBy">Who approved the draft.</param>
    /// <param name="notes">Optional review notes.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if approval succeeded.</returns>
    public async Task<bool> ApproveAsync(
        string draftId,
        string reviewedBy,
        string? notes = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(draftId);
        ArgumentException.ThrowIfNullOrWhiteSpace(reviewedBy);

        try
        {
            await _draftStore.UpdateReviewStatusAsync(
                draftId,
                FacetDriftVexReviewStatus.Approved,
                reviewedBy,
                notes,
                ct).ConfigureAwait(false);

            _logger.LogInformation(
                "Draft {DraftId} approved by {ReviewedBy}",
                draftId,
                reviewedBy);

            return true;
        }
        catch (KeyNotFoundException)
        {
            _logger.LogWarning("Draft {DraftId} not found for approval", draftId);
            return false;
        }
    }

    /// <summary>
    /// Rejects a draft.
    /// </summary>
    /// <param name="draftId">ID of the draft to reject.</param>
    /// <param name="reviewedBy">Who rejected the draft.</param>
    /// <param name="reason">Reason for rejection.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if rejection succeeded.</returns>
    public async Task<bool> RejectAsync(
        string draftId,
        string reviewedBy,
        string reason,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(draftId);
        ArgumentException.ThrowIfNullOrWhiteSpace(reviewedBy);
        ArgumentException.ThrowIfNullOrWhiteSpace(reason);

        try
        {
            await _draftStore.UpdateReviewStatusAsync(
                draftId,
                FacetDriftVexReviewStatus.Rejected,
                reviewedBy,
                reason,
                ct).ConfigureAwait(false);

            _logger.LogInformation(
                "Draft {DraftId} rejected by {ReviewedBy}: {Reason}",
                draftId,
                reviewedBy,
                reason);

            return true;
        }
        catch (KeyNotFoundException)
        {
            _logger.LogWarning("Draft {DraftId} not found for rejection", draftId);
            return false;
        }
    }

    /// <summary>
    /// Gets drafts pending review.
    /// </summary>
    public Task<ImmutableArray<FacetDriftVexDraft>> GetPendingDraftsAsync(
        string? imageDigest = null,
        CancellationToken ct = default)
    {
        var query = new FacetDriftVexDraftQuery
        {
            ImageDigest = imageDigest,
            ReviewStatus = FacetDriftVexReviewStatus.Pending
        };

        return _draftStore.QueryAsync(query, ct);
    }

    /// <summary>
    /// Gets drafts that have exceeded their review deadline.
    /// </summary>
    public Task<ImmutableArray<FacetDriftVexDraft>> GetOverdueDraftsAsync(CancellationToken ct = default)
    {
        return _draftStore.GetOverdueAsync(DateTimeOffset.UtcNow, ct);
    }
}
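Usage sketch (reviewer note): driving ExecuteAsync and the review methods. The drift report is assumed to come from IFacetDriftDetector; the reviewer identity and notes are illustrative.

static async Task ProcessDriftAsync(
    FacetDriftVexWorkflow workflow,
    FacetDriftReport driftReport,
    CancellationToken ct)
{
    var result = await workflow.ExecuteAsync(driftReport, skipExisting: true, ct);

    // Success is false only when at least one draft failed to store.
    if (!result.Success)
    {
        foreach (var error in result.Errors)
        {
            Console.Error.WriteLine(error);
        }
    }

    // A human reviewer would normally approve or reject later; shown inline here.
    foreach (var draftId in result.CreatedDraftIds)
    {
        await workflow.ApproveAsync(draftId, "reviewer@example.com",
            "Churn within expected bounds", ct);
    }
}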
59
src/__Libraries/StellaOps.Facet/FacetEntry.cs
Normal file
@@ -0,0 +1,59 @@
// <copyright file="FacetEntry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// A sealed facet entry within a <see cref="FacetSeal"/>.
/// </summary>
public sealed record FacetEntry
{
    /// <summary>
    /// Gets the facet identifier (e.g., "os-packages-dpkg", "lang-deps-npm").
    /// </summary>
    public required string FacetId { get; init; }

    /// <summary>
    /// Gets the human-readable name.
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Gets the category for grouping.
    /// </summary>
    public required FacetCategory Category { get; init; }

    /// <summary>
    /// Gets the selectors used to identify files in this facet.
    /// </summary>
    public required ImmutableArray<string> Selectors { get; init; }

    /// <summary>
    /// Gets the Merkle root of all files in this facet.
    /// </summary>
    /// <remarks>
    /// Format: "sha256:{hex}" computed from sorted file entries.
    /// </remarks>
    public required string MerkleRoot { get; init; }

    /// <summary>
    /// Gets the number of files in this facet.
    /// </summary>
    public required int FileCount { get; init; }

    /// <summary>
    /// Gets the total bytes across all files.
    /// </summary>
    public required long TotalBytes { get; init; }

    /// <summary>
    /// Gets the optional individual file entries (for detailed audit).
    /// </summary>
    /// <remarks>
    /// May be null for compact seals that only store Merkle roots.
    /// </remarks>
    public ImmutableArray<FacetFileEntry>? Files { get; init; }
}
78
src/__Libraries/StellaOps.Facet/FacetExtractionOptions.cs
Normal file
@@ -0,0 +1,78 @@
// <copyright file="FacetExtractionOptions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Options for facet extraction operations.
/// </summary>
public sealed record FacetExtractionOptions
{
    /// <summary>
    /// Gets the facets to extract. If empty, all built-in facets are used.
    /// </summary>
    public ImmutableArray<IFacet> Facets { get; init; } = [];

    /// <summary>
    /// Gets whether to include individual file entries in the result.
    /// </summary>
    /// <remarks>
    /// When false, only Merkle roots are computed (more compact).
    /// When true, all file details are preserved for audit.
    /// </remarks>
    public bool IncludeFileDetails { get; init; } = true;

    /// <summary>
    /// Gets whether to compute Merkle proofs for each file.
    /// </summary>
    /// <remarks>
    /// Enabling proofs allows individual file verification against the facet root.
    /// </remarks>
    public bool ComputeMerkleProofs { get; init; }

    /// <summary>
    /// Gets glob patterns for files to exclude from extraction.
    /// </summary>
    public ImmutableArray<string> ExcludePatterns { get; init; } = [];

    /// <summary>
    /// Gets the hash algorithm to use (default: SHA256).
    /// </summary>
    public string HashAlgorithm { get; init; } = "SHA256";

    /// <summary>
    /// Gets whether to follow symlinks.
    /// </summary>
    public bool FollowSymlinks { get; init; }

    /// <summary>
    /// Gets the maximum file size to hash (larger files are skipped with placeholder).
    /// </summary>
    public long MaxFileSizeBytes { get; init; } = 100 * 1024 * 1024; // 100 MB

    /// <summary>
    /// Gets the default options.
    /// </summary>
    public static FacetExtractionOptions Default { get; } = new();

    /// <summary>
    /// Gets options for compact sealing (no file details, just roots).
    /// </summary>
    public static FacetExtractionOptions Compact { get; } = new()
    {
        IncludeFileDetails = false,
        ComputeMerkleProofs = false
    };

    /// <summary>
    /// Gets options for full audit (all details and proofs).
    /// </summary>
    public static FacetExtractionOptions FullAudit { get; } = new()
    {
        IncludeFileDetails = true,
        ComputeMerkleProofs = true
    };
}
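Usage sketch (reviewer note): picking a preset and overriding fields via a `with` expression. The exclude patterns are illustrative.

// Routine scans: Merkle roots only, skipping volatile paths.
var routine = FacetExtractionOptions.Compact with
{
    ExcludePatterns = ["/var/log/**", "/tmp/**"]
};

// Release audits: full file detail plus per-file Merkle proofs.
var release = FacetExtractionOptions.FullAudit with
{
    MaxFileSizeBytes = 256L * 1024 * 1024 // raise the 100 MB default
};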
86
src/__Libraries/StellaOps.Facet/FacetExtractionResult.cs
Normal file
@@ -0,0 +1,86 @@
// <copyright file="FacetExtractionResult.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Result of facet extraction from an image.
/// </summary>
public sealed record FacetExtractionResult
{
    /// <summary>
    /// Gets the extracted facet entries.
    /// </summary>
    public required ImmutableArray<FacetEntry> Facets { get; init; }

    /// <summary>
    /// Gets files that didn't match any facet selector.
    /// </summary>
    public required ImmutableArray<FacetFileEntry> UnmatchedFiles { get; init; }

    /// <summary>
    /// Gets files that were skipped (too large, unreadable, etc.).
    /// </summary>
    public required ImmutableArray<SkippedFile> SkippedFiles { get; init; }

    /// <summary>
    /// Gets the combined Merkle root of all facets.
    /// </summary>
    public required string CombinedMerkleRoot { get; init; }

    /// <summary>
    /// Gets extraction statistics.
    /// </summary>
    public required FacetExtractionStats Stats { get; init; }

    /// <summary>
    /// Gets extraction warnings (non-fatal issues).
    /// </summary>
    public ImmutableArray<string> Warnings { get; init; } = [];
}

/// <summary>
/// A file that was skipped during extraction.
/// </summary>
/// <param name="Path">The file path.</param>
/// <param name="Reason">Why the file was skipped.</param>
public sealed record SkippedFile(string Path, string Reason);

/// <summary>
/// Statistics from facet extraction.
/// </summary>
public sealed record FacetExtractionStats
{
    /// <summary>
    /// Gets the total files processed.
    /// </summary>
    public required int TotalFilesProcessed { get; init; }

    /// <summary>
    /// Gets the total bytes across all files.
    /// </summary>
    public required long TotalBytes { get; init; }

    /// <summary>
    /// Gets the number of files matched to facets.
    /// </summary>
    public required int FilesMatched { get; init; }

    /// <summary>
    /// Gets the number of files not matching any facet.
    /// </summary>
    public required int FilesUnmatched { get; init; }

    /// <summary>
    /// Gets the number of files skipped.
    /// </summary>
    public required int FilesSkipped { get; init; }

    /// <summary>
    /// Gets the extraction duration.
    /// </summary>
    public required TimeSpan Duration { get; init; }
}
18
src/__Libraries/StellaOps.Facet/FacetFileEntry.cs
Normal file
@@ -0,0 +1,18 @@
// <copyright file="FacetFileEntry.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Represents a single file within a facet.
/// </summary>
/// <param name="Path">The file path within the image.</param>
/// <param name="Digest">Content hash in "algorithm:hex" format (e.g., "sha256:abc...").</param>
/// <param name="SizeBytes">File size in bytes.</param>
/// <param name="ModifiedAt">Last modification timestamp, if available.</param>
public sealed record FacetFileEntry(
    string Path,
    string Digest,
    long SizeBytes,
    DateTimeOffset? ModifiedAt);
26
src/__Libraries/StellaOps.Facet/FacetFileModification.cs
Normal file
@@ -0,0 +1,26 @@
// <copyright file="FacetFileModification.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Represents a modified file between baseline and current state.
/// </summary>
/// <param name="Path">The file path within the image.</param>
/// <param name="PreviousDigest">Content hash from baseline.</param>
/// <param name="CurrentDigest">Content hash from current state.</param>
/// <param name="PreviousSizeBytes">File size in baseline.</param>
/// <param name="CurrentSizeBytes">File size in current state.</param>
public sealed record FacetFileModification(
    string Path,
    string PreviousDigest,
    string CurrentDigest,
    long PreviousSizeBytes,
    long CurrentSizeBytes)
{
    /// <summary>
    /// Gets the size change in bytes (positive = growth, negative = shrinkage).
    /// </summary>
    public long SizeDelta => CurrentSizeBytes - PreviousSizeBytes;
}
194
src/__Libraries/StellaOps.Facet/FacetMerkleTree.cs
Normal file
@@ -0,0 +1,194 @@
// <copyright file="FacetMerkleTree.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Globalization;
using System.Text;

namespace StellaOps.Facet;

/// <summary>
/// Computes deterministic Merkle roots for facet file sets.
/// </summary>
/// <remarks>
/// <para>
/// Leaf nodes are computed from: path | digest | size (sorted by path).
/// Internal nodes are computed by concatenating and hashing child pairs.
/// </para>
/// </remarks>
public sealed class FacetMerkleTree
{
    private readonly ICryptoHash _cryptoHash;
    private readonly string _algorithm;

    /// <summary>
    /// Empty tree root constant (SHA-256 of empty input).
    /// </summary>
    public const string EmptyTreeRoot = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetMerkleTree"/> class.
    /// </summary>
    /// <param name="cryptoHash">Cryptographic hash implementation.</param>
    /// <param name="algorithm">Hash algorithm to use (default: SHA256).</param>
    public FacetMerkleTree(ICryptoHash? cryptoHash = null, string algorithm = "SHA256")
    {
        _cryptoHash = cryptoHash ?? DefaultCryptoHash.Instance;
        _algorithm = algorithm;
    }

    /// <summary>
    /// Compute Merkle root from file entries.
    /// </summary>
    /// <param name="files">Files to include in the tree.</param>
    /// <returns>Merkle root in "sha256:{hex}" format.</returns>
    public string ComputeRoot(IEnumerable<FacetFileEntry> files)
    {
        ArgumentNullException.ThrowIfNull(files);

        // Sort files by path for determinism (ordinal comparison)
        var sortedFiles = files
            .OrderBy(f => f.Path, StringComparer.Ordinal)
            .ToList();

        if (sortedFiles.Count == 0)
        {
            return EmptyTreeRoot;
        }

        // Build leaf nodes
        var leaves = sortedFiles
            .Select(ComputeLeafHash)
            .ToList();

        // Build tree and return root
        return ComputeMerkleRootFromNodes(leaves);
    }

    /// <summary>
    /// Compute combined root from multiple facet entries.
    /// </summary>
    /// <param name="facets">Facet entries with Merkle roots.</param>
    /// <returns>Combined Merkle root.</returns>
    public string ComputeCombinedRoot(IEnumerable<FacetEntry> facets)
    {
        ArgumentNullException.ThrowIfNull(facets);

        var facetRoots = facets
            .OrderBy(f => f.FacetId, StringComparer.Ordinal)
            .Select(f => HexToBytes(StripAlgorithmPrefix(f.MerkleRoot)))
            .ToList();

        if (facetRoots.Count == 0)
        {
            return EmptyTreeRoot;
        }

        return ComputeMerkleRootFromNodes(facetRoots);
    }

    /// <summary>
    /// Verify that a file is included in a Merkle root.
    /// </summary>
    /// <param name="file">The file to verify.</param>
    /// <param name="proof">The Merkle proof (sibling hashes).</param>
    /// <param name="expectedRoot">The expected Merkle root.</param>
    /// <returns>True if the proof is valid.</returns>
    public bool VerifyProof(FacetFileEntry file, IReadOnlyList<byte[]> proof, string expectedRoot)
    {
        ArgumentNullException.ThrowIfNull(file);
        ArgumentNullException.ThrowIfNull(proof);

        var currentHash = ComputeLeafHash(file);

        foreach (var sibling in proof)
        {
            // Determine ordering: smaller hash comes first. Note this assumes
            // proofs are generated with the same sorted-pair convention;
            // ComputeMerkleRootFromNodes itself pairs nodes positionally.
            var comparison = CompareHashes(currentHash, sibling);
            currentHash = comparison <= 0
                ? HashPair(currentHash, sibling)
                : HashPair(sibling, currentHash);
        }

        var computedRoot = FormatRoot(currentHash);
        return string.Equals(computedRoot, expectedRoot, StringComparison.OrdinalIgnoreCase);
    }

    private byte[] ComputeLeafHash(FacetFileEntry file)
    {
        // Canonical leaf format: "path|digest|size"
        // Using InvariantCulture for size formatting
        var canonical = string.Create(
            CultureInfo.InvariantCulture,
            $"{file.Path}|{file.Digest}|{file.SizeBytes}");

        return _cryptoHash.ComputeHash(Encoding.UTF8.GetBytes(canonical), _algorithm);
    }

    private string ComputeMerkleRootFromNodes(List<byte[]> nodes)
    {
        while (nodes.Count > 1)
        {
            var nextLevel = new List<byte[]>();

            for (var i = 0; i < nodes.Count; i += 2)
            {
                if (i + 1 < nodes.Count)
                {
                    // Hash pair of nodes
                    nextLevel.Add(HashPair(nodes[i], nodes[i + 1]));
                }
                else
                {
                    // Odd node: promote as-is (or optionally hash with itself)
                    nextLevel.Add(nodes[i]);
                }
            }

            nodes = nextLevel;
        }

        return FormatRoot(nodes[0]);
    }

    private byte[] HashPair(byte[] left, byte[] right)
    {
        var combined = new byte[left.Length + right.Length];
        left.CopyTo(combined, 0);
        right.CopyTo(combined, left.Length);
        return _cryptoHash.ComputeHash(combined, _algorithm);
    }

    private static int CompareHashes(byte[] a, byte[] b)
    {
        var minLength = Math.Min(a.Length, b.Length);
        for (var i = 0; i < minLength; i++)
        {
            var cmp = a[i].CompareTo(b[i]);
            if (cmp != 0)
            {
                return cmp;
            }
        }

        return a.Length.CompareTo(b.Length);
    }

    private string FormatRoot(byte[] hash)
    {
        var algPrefix = _algorithm.ToLowerInvariant();
        var hex = Convert.ToHexString(hash).ToLowerInvariant();
        return $"{algPrefix}:{hex}";
    }

    private static string StripAlgorithmPrefix(string digest)
    {
        var colonIndex = digest.IndexOf(':', StringComparison.Ordinal);
        return colonIndex >= 0 ? digest[(colonIndex + 1)..] : digest;
    }

    private static byte[] HexToBytes(string hex)
    {
        return Convert.FromHexString(hex);
    }
}
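Worked example (reviewer note): determinism of ComputeRoot. The file digests are placeholders; any "algorithm:hex"-style string hashes the same way inside a leaf.

using System.Linq;
using StellaOps.Facet;

var tree = new FacetMerkleTree();

var files = new[]
{
    new FacetFileEntry("/etc/app/a.conf", "sha256:aaaa", 128, ModifiedAt: null),
    new FacetFileEntry("/etc/app/b.conf", "sha256:bbbb", 256, ModifiedAt: null),
};

// Input order does not matter: ComputeRoot sorts by path (ordinal) before
// hashing leaves of the canonical form "path|digest|size".
var forward = tree.ComputeRoot(files);
var reversed = tree.ComputeRoot(files.Reverse());
// forward == reversed, and both start with "sha256:".
// An empty file set yields FacetMerkleTree.EmptyTreeRoot.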
65
src/__Libraries/StellaOps.Facet/FacetQuota.cs
Normal file
@@ -0,0 +1,65 @@
// <copyright file="FacetQuota.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Quota configuration for a facet, defining acceptable drift thresholds.
/// </summary>
public sealed record FacetQuota
{
    /// <summary>
    /// Gets or initializes the maximum allowed churn percentage (0-100).
    /// </summary>
    /// <remarks>
    /// Churn = (added + removed + modified files) / baseline file count * 100.
    /// </remarks>
    public decimal MaxChurnPercent { get; init; } = 10m;

    /// <summary>
    /// Gets or initializes the maximum number of changed files before alert.
    /// </summary>
    public int MaxChangedFiles { get; init; } = 50;

    /// <summary>
    /// Gets or initializes the glob patterns for files exempt from quota enforcement.
    /// </summary>
    /// <remarks>
    /// Files matching these patterns are excluded from drift calculations.
    /// Useful for expected changes like logs, timestamps, or cache files.
    /// </remarks>
    public ImmutableArray<string> AllowlistGlobs { get; init; } = [];

    /// <summary>
    /// Gets or initializes the action when quota is exceeded.
    /// </summary>
    public QuotaExceededAction Action { get; init; } = QuotaExceededAction.Warn;

    /// <summary>
    /// Gets the default quota configuration.
    /// </summary>
    public static FacetQuota Default { get; } = new();

    /// <summary>
    /// Gets a strict quota suitable for high-security binaries.
    /// </summary>
    public static FacetQuota Strict { get; } = new()
    {
        MaxChurnPercent = 5m,
        MaxChangedFiles = 10,
        Action = QuotaExceededAction.Block
    };

    /// <summary>
    /// Gets a permissive quota suitable for frequently-updated dependencies.
    /// </summary>
    public static FacetQuota Permissive { get; } = new()
    {
        MaxChurnPercent = 25m,
        MaxChangedFiles = 200,
        Action = QuotaExceededAction.Warn
    };
}
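Usage sketch (reviewer note): building the per-facet quota map consumed by FacetSeal and FacetSealer. "os-packages-dpkg" and "lang-deps-npm" appear in the facet docs above; "config-etc" and the allowlist entries are illustrative.

using System.Collections.Immutable;
using StellaOps.Facet;

// Keys are facet IDs; any facet without an entry falls back to
// FacetQuota.Default (10% churn, 50 files, warn).
var quotas = ImmutableDictionary<string, FacetQuota>.Empty
    .Add("os-packages-dpkg", FacetQuota.Strict)
    .Add("lang-deps-npm", FacetQuota.Permissive)
    .Add("config-etc", FacetQuota.Default with
    {
        AllowlistGlobs = ["/etc/hostname", "/etc/resolv.conf"]
    });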
114
src/__Libraries/StellaOps.Facet/FacetSeal.cs
Normal file
@@ -0,0 +1,114 @@
// <copyright file="FacetSeal.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Sealed manifest of facets for an image at a point in time.
/// </summary>
/// <remarks>
/// <para>
/// A FacetSeal captures the cryptographic state of all facets in an image,
/// enabling drift detection and quota enforcement on subsequent scans.
/// </para>
/// <para>
/// The seal can be optionally signed with DSSE for authenticity verification.
/// </para>
/// </remarks>
public sealed record FacetSeal
{
    /// <summary>
    /// Current schema version.
    /// </summary>
    public const string CurrentSchemaVersion = "1.0.0";

    /// <summary>
    /// Gets the schema version for forward compatibility.
    /// </summary>
    public string SchemaVersion { get; init; } = CurrentSchemaVersion;

    /// <summary>
    /// Gets the image digest this seal applies to.
    /// </summary>
    /// <remarks>
    /// Format: "sha256:{hex}" or "sha512:{hex}".
    /// </remarks>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Gets when the seal was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Gets the optional build attestation reference (in-toto provenance).
    /// </summary>
    public string? BuildAttestationRef { get; init; }

    /// <summary>
    /// Gets the individual facet seals.
    /// </summary>
    public required ImmutableArray<FacetEntry> Facets { get; init; }

    /// <summary>
    /// Gets the quota configuration per facet.
    /// </summary>
    /// <remarks>
    /// Keys are facet IDs. Facets without explicit quotas use default values.
    /// </remarks>
    public ImmutableDictionary<string, FacetQuota>? Quotas { get; init; }

    /// <summary>
    /// Gets the combined Merkle root of all facet roots.
    /// </summary>
    /// <remarks>
    /// Computed from facet Merkle roots in sorted order by FacetId.
    /// Enables single-value integrity verification.
    /// </remarks>
    public required string CombinedMerkleRoot { get; init; }

    /// <summary>
    /// Gets the optional DSSE signature over the canonical form.
    /// </summary>
    /// <remarks>
    /// Base64-encoded DSSE envelope when the seal is signed.
    /// </remarks>
    public string? Signature { get; init; }

    /// <summary>
    /// Gets the signing key identifier, if signed.
    /// </summary>
    public string? SigningKeyId { get; init; }

    /// <summary>
    /// Gets whether this seal is signed.
    /// </summary>
    public bool IsSigned => !string.IsNullOrEmpty(Signature);

    /// <summary>
    /// Gets the quota for a specific facet, or the default if not configured.
    /// </summary>
    /// <param name="facetId">The facet identifier.</param>
    /// <returns>The configured quota or <see cref="FacetQuota.Default"/>.</returns>
    public FacetQuota GetQuota(string facetId)
    {
        if (Quotas is not null &&
            Quotas.TryGetValue(facetId, out var quota))
        {
            return quota;
        }

        return FacetQuota.Default;
    }

    /// <summary>
    /// Gets a facet entry by ID.
    /// </summary>
    /// <param name="facetId">The facet identifier.</param>
    /// <returns>The facet entry or null if not found.</returns>
    public FacetEntry? GetFacet(string facetId)
        => Facets.FirstOrDefault(f => f.FacetId == facetId);
}
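Sketch (reviewer note): the GetQuota fallback means enforcement code never needs a presence check.

static bool ExceedsQuota(FacetSeal seal, string facetId, decimal observedChurnPercent)
{
    // Falls back to FacetQuota.Default when no quota is configured.
    var quota = seal.GetQuota(facetId);
    return observedChurnPercent > quota.MaxChurnPercent;
}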
121
src/__Libraries/StellaOps.Facet/FacetSealer.cs
Normal file
@@ -0,0 +1,121 @@
// <copyright file="FacetSealer.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Creates <see cref="FacetSeal"/> instances from extraction results.
/// </summary>
public sealed class FacetSealer
{
    private readonly TimeProvider _timeProvider;
    private readonly FacetMerkleTree _merkleTree;

    /// <summary>
    /// Initializes a new instance of the <see cref="FacetSealer"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for timestamps.</param>
    /// <param name="cryptoHash">Hash implementation.</param>
    /// <param name="algorithm">Hash algorithm.</param>
    public FacetSealer(
        TimeProvider? timeProvider = null,
        ICryptoHash? cryptoHash = null,
        string algorithm = "SHA256")
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
        _merkleTree = new FacetMerkleTree(cryptoHash, algorithm);
    }

    /// <summary>
    /// Create a seal from an extraction result.
    /// </summary>
    /// <param name="imageDigest">The image digest this seal applies to.</param>
    /// <param name="extraction">The extraction result.</param>
    /// <param name="quotas">Optional per-facet quota configuration.</param>
    /// <param name="buildAttestationRef">Optional build attestation reference.</param>
    /// <returns>The created seal.</returns>
    public FacetSeal CreateSeal(
        string imageDigest,
        FacetExtractionResult extraction,
        ImmutableDictionary<string, FacetQuota>? quotas = null,
        string? buildAttestationRef = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
        ArgumentNullException.ThrowIfNull(extraction);

        var combinedRoot = _merkleTree.ComputeCombinedRoot(extraction.Facets);

        return new FacetSeal
        {
            ImageDigest = imageDigest,
            CreatedAt = _timeProvider.GetUtcNow(),
            BuildAttestationRef = buildAttestationRef,
            Facets = extraction.Facets,
            Quotas = quotas,
            CombinedMerkleRoot = combinedRoot
        };
    }

    /// <summary>
    /// Create a seal from facet entries directly.
    /// </summary>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="facets">The facet entries.</param>
    /// <param name="quotas">Optional quotas.</param>
    /// <param name="buildAttestationRef">Optional attestation ref.</param>
    /// <returns>The created seal.</returns>
    public FacetSeal CreateSeal(
        string imageDigest,
        ImmutableArray<FacetEntry> facets,
        ImmutableDictionary<string, FacetQuota>? quotas = null,
        string? buildAttestationRef = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);

        var combinedRoot = _merkleTree.ComputeCombinedRoot(facets);

        return new FacetSeal
        {
            ImageDigest = imageDigest,
            CreatedAt = _timeProvider.GetUtcNow(),
            BuildAttestationRef = buildAttestationRef,
            Facets = facets,
            Quotas = quotas,
            CombinedMerkleRoot = combinedRoot
        };
    }

    /// <summary>
    /// Create a facet entry from file entries.
    /// </summary>
    /// <param name="facet">The facet definition.</param>
    /// <param name="files">Files belonging to this facet.</param>
    /// <param name="includeFileDetails">Whether to include individual file entries.</param>
    /// <returns>The facet entry.</returns>
    public FacetEntry CreateFacetEntry(
        IFacet facet,
        IReadOnlyList<FacetFileEntry> files,
        bool includeFileDetails = true)
    {
        ArgumentNullException.ThrowIfNull(facet);
        ArgumentNullException.ThrowIfNull(files);

        var merkleRoot = _merkleTree.ComputeRoot(files);
        var totalBytes = files.Sum(f => f.SizeBytes);

        return new FacetEntry
        {
            FacetId = facet.FacetId,
            Name = facet.Name,
            Category = facet.Category,
            Selectors = [.. facet.Selectors],
            MerkleRoot = merkleRoot,
            FileCount = files.Count,
            TotalBytes = totalBytes,
            Files = includeFileDetails ? [.. files] : null
        };
    }
}
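Usage sketch (reviewer note): sealing an extraction result. The defaults resolve to TimeProvider.System and SHA-256.

static FacetSeal SealImage(
    FacetSealer sealer,
    string imageDigest,
    FacetExtractionResult extraction)
{
    // CreateSeal recomputes the combined Merkle root from the facet entries,
    // so callers never supply it directly.
    return sealer.CreateSeal(imageDigest, extraction, quotas: null, buildAttestationRef: null);
}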
155
src/__Libraries/StellaOps.Facet/FacetServiceCollectionExtensions.cs
Normal file
@@ -0,0 +1,155 @@
// <copyright file="FacetServiceCollectionExtensions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;

namespace StellaOps.Facet;

/// <summary>
/// Extension methods for registering facet services with dependency injection.
/// </summary>
public static class FacetServiceCollectionExtensions
{
    /// <summary>
    /// Add facet services to the service collection.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddFacetServices(this IServiceCollection services)
    {
        ArgumentNullException.ThrowIfNull(services);

        // Register crypto hash
        services.TryAddSingleton<ICryptoHash>(DefaultCryptoHash.Instance);

        // Register Merkle tree
        services.TryAddSingleton(sp =>
        {
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetMerkleTree(crypto);
        });

        // Register classifier with built-in facets
        services.TryAddSingleton(_ => FacetClassifier.Default);

        // Register sealer
        services.TryAddSingleton(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetSealer(timeProvider, crypto);
        });

        // Register drift detector
        services.TryAddSingleton<IFacetDriftDetector>(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            return new FacetDriftDetector(timeProvider);
        });

        // Register facet extractor
        services.TryAddSingleton<IFacetExtractor>(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            var logger = sp.GetService<Microsoft.Extensions.Logging.ILogger<GlobFacetExtractor>>();
            return new GlobFacetExtractor(timeProvider, crypto, logger);
        });

        return services;
    }

    /// <summary>
    /// Add facet services with custom configuration.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Configuration action.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection AddFacetServices(
        this IServiceCollection services,
        Action<FacetServiceOptions> configure)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configure);

        var options = new FacetServiceOptions();
        configure(options);

        // Register crypto hash
        if (options.CryptoHash is not null)
        {
            services.AddSingleton(options.CryptoHash);
        }
        else
        {
            services.TryAddSingleton<ICryptoHash>(DefaultCryptoHash.Instance);
        }

        // Register custom facets if provided
        if (options.CustomFacets is { Count: > 0 })
        {
            var allFacets = BuiltInFacets.All.Concat(options.CustomFacets).ToList();
            services.AddSingleton(new FacetClassifier(allFacets));
        }
        else
        {
            services.TryAddSingleton(_ => FacetClassifier.Default);
        }

        // Register Merkle tree with algorithm
        services.TryAddSingleton(sp =>
        {
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetMerkleTree(crypto, options.HashAlgorithm);
        });

        // Register sealer
        services.TryAddSingleton(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            return new FacetSealer(timeProvider, crypto, options.HashAlgorithm);
        });

        // Register drift detector
        services.TryAddSingleton<IFacetDriftDetector>(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            return new FacetDriftDetector(timeProvider);
        });

        // Register facet extractor
        services.TryAddSingleton<IFacetExtractor>(sp =>
        {
            var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System;
            var crypto = sp.GetService<ICryptoHash>() ?? DefaultCryptoHash.Instance;
            var logger = sp.GetService<Microsoft.Extensions.Logging.ILogger<GlobFacetExtractor>>();
            return new GlobFacetExtractor(timeProvider, crypto, logger);
        });

        return services;
    }
}

/// <summary>
/// Configuration options for facet services.
/// </summary>
public sealed class FacetServiceOptions
{
    /// <summary>
    /// Gets or sets the hash algorithm (default: SHA256).
    /// </summary>
    public string HashAlgorithm { get; set; } = "SHA256";

    /// <summary>
    /// Gets or sets custom facet definitions to add to built-ins.
    /// </summary>
    public List<IFacet>? CustomFacets { get; set; }

    /// <summary>
    /// Gets or sets a custom crypto hash implementation.
    /// </summary>
    public ICryptoHash? CryptoHash { get; set; }
}
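Usage sketch (reviewer note): registering the facet services with a non-default hash algorithm and resolving the public interfaces. Only HashAlgorithm is set here; CustomFacets and CryptoHash would follow the same pattern.

using Microsoft.Extensions.DependencyInjection;
using StellaOps.Facet;

var services = new ServiceCollection();

// Configured variant; the extractor and detector pick up whatever
// TimeProvider/ICryptoHash the container already has registered.
services.AddFacetServices(options => options.HashAlgorithm = "SHA512");

await using var provider = services.BuildServiceProvider();
var extractor = provider.GetRequiredService<IFacetExtractor>();
var detector = provider.GetRequiredService<IFacetDriftDetector>();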
379
src/__Libraries/StellaOps.Facet/GlobFacetExtractor.cs
Normal file
@@ -0,0 +1,379 @@
// <copyright file="GlobFacetExtractor.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using System.Diagnostics;
using System.Formats.Tar;
using System.IO.Compression;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;

namespace StellaOps.Facet;

/// <summary>
/// Extracts facets from container images using glob pattern matching.
/// </summary>
public sealed class GlobFacetExtractor : IFacetExtractor
{
    private readonly FacetSealer _sealer;
    private readonly ICryptoHash _cryptoHash;
    private readonly ILogger<GlobFacetExtractor> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="GlobFacetExtractor"/> class.
    /// </summary>
    /// <param name="timeProvider">Time provider for timestamps.</param>
    /// <param name="cryptoHash">Hash implementation.</param>
    /// <param name="logger">Logger instance.</param>
    public GlobFacetExtractor(
        TimeProvider? timeProvider = null,
        ICryptoHash? cryptoHash = null,
        ILogger<GlobFacetExtractor>? logger = null)
    {
        _cryptoHash = cryptoHash ?? DefaultCryptoHash.Instance;
        _sealer = new FacetSealer(timeProvider, _cryptoHash);
        _logger = logger ?? NullLogger<GlobFacetExtractor>.Instance;
    }

    /// <inheritdoc/>
    public async Task<FacetExtractionResult> ExtractFromDirectoryAsync(
        string rootPath,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(rootPath);

        if (!Directory.Exists(rootPath))
        {
            throw new DirectoryNotFoundException($"Directory not found: {rootPath}");
        }

        options ??= FacetExtractionOptions.Default;
        var sw = Stopwatch.StartNew();

        var facets = options.Facets.IsDefault || options.Facets.IsEmpty
            ? BuiltInFacets.All.ToList()
            : options.Facets.ToList();

        var matchers = facets.ToDictionary(f => f.FacetId, GlobMatcher.ForFacet);
        var excludeMatcher = options.ExcludePatterns.Length > 0
            ? new GlobMatcher(options.ExcludePatterns)
            : null;

        var facetFiles = facets.ToDictionary(f => f.FacetId, _ => new List<FacetFileEntry>());
        var unmatchedFiles = new List<FacetFileEntry>();
        var skippedFiles = new List<SkippedFile>();
        var warnings = new List<string>();

        int totalFilesProcessed = 0;
        long totalBytes = 0;

        foreach (var filePath in Directory.EnumerateFiles(rootPath, "*", SearchOption.AllDirectories))
        {
            ct.ThrowIfCancellationRequested();

            var relativePath = GetRelativePath(rootPath, filePath);

            // Check exclusion patterns
            if (excludeMatcher?.IsMatch(relativePath) == true)
            {
                skippedFiles.Add(new SkippedFile(relativePath, "Matched exclusion pattern"));
                continue;
            }

            try
            {
                var fileInfo = new FileInfo(filePath);

                // Skip symlinks if not following
                if (!options.FollowSymlinks && fileInfo.LinkTarget is not null)
                {
                    skippedFiles.Add(new SkippedFile(relativePath, "Symlink"));
                    continue;
                }

                // Skip files too large
                if (fileInfo.Length > options.MaxFileSizeBytes)
                {
                    skippedFiles.Add(new SkippedFile(relativePath, $"Exceeds max size ({fileInfo.Length} > {options.MaxFileSizeBytes})"));
                    continue;
                }

                totalFilesProcessed++;
                totalBytes += fileInfo.Length;

                var entry = await CreateFileEntryAsync(filePath, relativePath, fileInfo, options.HashAlgorithm, ct)
                    .ConfigureAwait(false);

                bool matched = false;
                foreach (var facet in facets)
                {
                    if (matchers[facet.FacetId].IsMatch(relativePath))
                    {
                        facetFiles[facet.FacetId].Add(entry);
                        matched = true;

                        // Don't break - a file can match multiple facets
                    }
                }

                if (!matched)
                {
                    unmatchedFiles.Add(entry);
                }
            }
            catch (Exception ex) when (ex is IOException or UnauthorizedAccessException)
            {
                _logger.LogWarning(ex, "Failed to process file: {Path}", relativePath);
                skippedFiles.Add(new SkippedFile(relativePath, ex.Message));
            }
        }

        sw.Stop();

        return BuildResult(facets, facetFiles, unmatchedFiles, skippedFiles, warnings, totalFilesProcessed, totalBytes, sw.Elapsed, options);
    }

    /// <inheritdoc/>
    public async Task<FacetExtractionResult> ExtractFromTarAsync(
        Stream tarStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(tarStream);

        options ??= FacetExtractionOptions.Default;
        var sw = Stopwatch.StartNew();

        var facets = options.Facets.IsDefault || options.Facets.IsEmpty
            ? BuiltInFacets.All.ToList()
            : options.Facets.ToList();

        var matchers = facets.ToDictionary(f => f.FacetId, GlobMatcher.ForFacet);
        var excludeMatcher = options.ExcludePatterns.Length > 0
            ? new GlobMatcher(options.ExcludePatterns)
            : null;

        var facetFiles = facets.ToDictionary(f => f.FacetId, _ => new List<FacetFileEntry>());
        var unmatchedFiles = new List<FacetFileEntry>();
        var skippedFiles = new List<SkippedFile>();
        var warnings = new List<string>();

        int totalFilesProcessed = 0;
        long totalBytes = 0;

        using var tarReader = new TarReader(tarStream, leaveOpen: true);

        while (await tarReader.GetNextEntryAsync(copyData: false, ct).ConfigureAwait(false) is { } tarEntry)
        {
            ct.ThrowIfCancellationRequested();

            var path = NormalizeTarPath(tarEntry.Name);

            // Skip symlinks if not following. This runs before the
            // regular-file filter below; otherwise symlinks would be dropped
            // silently without a recorded skip reason.
            if (!options.FollowSymlinks && tarEntry.EntryType == TarEntryType.SymbolicLink)
            {
                skippedFiles.Add(new SkippedFile(path, "Symlink"));
                continue;
            }

            // Skip other non-regular entries (directories, devices, hard links)
            if (tarEntry.EntryType != TarEntryType.RegularFile &&
                tarEntry.EntryType != TarEntryType.V7RegularFile)
            {
                continue;
            }

            if (excludeMatcher?.IsMatch(path) == true)
            {
                skippedFiles.Add(new SkippedFile(path, "Matched exclusion pattern"));
                continue;
            }

            if (tarEntry.Length > options.MaxFileSizeBytes)
            {
                skippedFiles.Add(new SkippedFile(path, $"Exceeds max size ({tarEntry.Length} > {options.MaxFileSizeBytes})"));
                continue;
            }

            try
            {
                totalFilesProcessed++;
                totalBytes += tarEntry.Length;

                var entry = await CreateFileEntryFromTarAsync(tarEntry, path, options.HashAlgorithm, ct)
                    .ConfigureAwait(false);

                bool matched = false;
                foreach (var facet in facets)
                {
                    if (matchers[facet.FacetId].IsMatch(path))
                    {
                        facetFiles[facet.FacetId].Add(entry);
                        matched = true;
                    }
                }

                if (!matched)
                {
                    unmatchedFiles.Add(entry);
                }
            }
            catch (Exception ex) when (ex is IOException or InvalidDataException)
            {
                _logger.LogWarning(ex, "Failed to process tar entry: {Path}", path);
                skippedFiles.Add(new SkippedFile(path, ex.Message));
            }
        }

        sw.Stop();

        return BuildResult(facets, facetFiles, unmatchedFiles, skippedFiles, warnings, totalFilesProcessed, totalBytes, sw.Elapsed, options);
    }

    /// <inheritdoc/>
    public async Task<FacetExtractionResult> ExtractFromOciLayerAsync(
        Stream layerStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(layerStream);

        // OCI layers are gzipped tars - decompress then delegate
        await using var gzipStream = new GZipStream(layerStream, CompressionMode.Decompress, leaveOpen: true);
        return await ExtractFromTarAsync(gzipStream, options, ct).ConfigureAwait(false);
    }

    private async Task<FacetFileEntry> CreateFileEntryAsync(
        string fullPath,
        string relativePath,
        FileInfo fileInfo,
        string algorithm,
        CancellationToken ct)
    {
        await using var stream = File.OpenRead(fullPath);
        var hashBytes = await _cryptoHash.ComputeHashAsync(stream, algorithm, ct).ConfigureAwait(false);
        var digest = FormatDigest(hashBytes, algorithm);

        return new FacetFileEntry(
            relativePath,
            digest,
            fileInfo.Length,
            fileInfo.LastWriteTimeUtc);
    }

    private async Task<FacetFileEntry> CreateFileEntryFromTarAsync(
        TarEntry entry,
        string path,
        string algorithm,
        CancellationToken ct)
    {
        var dataStream = entry.DataStream;
        if (dataStream is null)
        {
            // Empty file
            var emptyHashBytes = await _cryptoHash.ComputeHashAsync(Stream.Null, algorithm, ct).ConfigureAwait(false);
            var emptyDigest = FormatDigest(emptyHashBytes, algorithm);
            return new FacetFileEntry(path, emptyDigest, 0, entry.ModificationTime);
        }

        var hashBytes = await _cryptoHash.ComputeHashAsync(dataStream, algorithm, ct).ConfigureAwait(false);
        var digest = FormatDigest(hashBytes, algorithm);

        return new FacetFileEntry(
            path,
            digest,
            entry.Length,
            entry.ModificationTime);
    }

    private static string FormatDigest(byte[] hashBytes, string algorithm)
    {
        var hex = Convert.ToHexString(hashBytes).ToLowerInvariant();
        return $"{algorithm.ToLowerInvariant()}:{hex}";
    }

    private FacetExtractionResult BuildResult(
        List<IFacet> facets,
        Dictionary<string, List<FacetFileEntry>> facetFiles,
        List<FacetFileEntry> unmatchedFiles,
        List<SkippedFile> skippedFiles,
        List<string> warnings,
        int totalFilesProcessed,
        long totalBytes,
        TimeSpan duration,
        FacetExtractionOptions options)
    {
        var facetEntries = new List<FacetEntry>();
        int filesMatched = 0;

        foreach (var facet in facets)
        {
            var files = facetFiles[facet.FacetId];
            if (files.Count == 0)
            {
                continue;
            }

            filesMatched += files.Count;

            // Sort files deterministically for consistent Merkle root
            var sortedFiles = files.OrderBy(f => f.Path, StringComparer.Ordinal).ToList();

            var entry = _sealer.CreateFacetEntry(facet, sortedFiles, options.IncludeFileDetails);
            facetEntries.Add(entry);
        }

        // Sort facet entries deterministically
        var sortedFacets = facetEntries.OrderBy(f => f.FacetId, StringComparer.Ordinal).ToImmutableArray();

        var merkleTree = new FacetMerkleTree(_cryptoHash);
        var combinedRoot = merkleTree.ComputeCombinedRoot(sortedFacets);

        var stats = new FacetExtractionStats
        {
            TotalFilesProcessed = totalFilesProcessed,
            TotalBytes = totalBytes,
            FilesMatched = filesMatched,
            FilesUnmatched = unmatchedFiles.Count,
            FilesSkipped = skippedFiles.Count,
            Duration = duration
        };

        return new FacetExtractionResult
        {
            Facets = sortedFacets,
            UnmatchedFiles = options.IncludeFileDetails
                ? [.. unmatchedFiles.OrderBy(f => f.Path, StringComparer.Ordinal)]
                : [],
            SkippedFiles = [.. skippedFiles],
            CombinedMerkleRoot = combinedRoot,
            Stats = stats,
            Warnings = [.. warnings]
        };
    }

    private static string GetRelativePath(string rootPath, string fullPath)
    {
        var relative = Path.GetRelativePath(rootPath, fullPath);

        // Normalize to Unix-style path with leading slash
        return "/" + relative.Replace('\\', '/');
    }

    private static string NormalizeTarPath(string path)
    {
        // Remove leading ./ if present
        if (path.StartsWith("./", StringComparison.Ordinal))
        {
            path = path[2..];
        }

        // Ensure leading slash
        if (!path.StartsWith('/'))
        {
            path = "/" + path;
        }

        return path;
    }
}
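End-to-end sketch (reviewer note): extract from an unpacked image root, surface skips, and seal. Paths and the image digest are supplied by the caller; all APIs used here are defined in this commit.

static async Task<FacetSeal> ExtractAndSealAsync(
    string rootPath, string imageDigest, CancellationToken ct)
{
    var extractor = new GlobFacetExtractor();
    var sealer = new FacetSealer();

    var extraction = await extractor.ExtractFromDirectoryAsync(
        rootPath, FacetExtractionOptions.Default, ct);

    // Skips are non-fatal (size cap, symlinks, unreadable files); log for triage.
    foreach (var (path, reason) in extraction.SkippedFiles)
    {
        Console.WriteLine($"skipped {path}: {reason}");
    }

    return sealer.CreateSeal(imageDigest, extraction);
}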
70
src/__Libraries/StellaOps.Facet/GlobMatcher.cs
Normal file
@@ -0,0 +1,70 @@
// <copyright file="GlobMatcher.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using DotNet.Globbing;

namespace StellaOps.Facet;

/// <summary>
/// Utility for matching file paths against glob patterns.
/// </summary>
public sealed class GlobMatcher
{
    private readonly List<Glob> _globs;

    /// <summary>
    /// Initializes a new instance of the <see cref="GlobMatcher"/> class.
    /// </summary>
    /// <param name="patterns">Glob patterns to match against.</param>
    public GlobMatcher(IEnumerable<string> patterns)
    {
        ArgumentNullException.ThrowIfNull(patterns);

        _globs = patterns
            .Select(p => Glob.Parse(NormalizePattern(p)))
            .ToList();
    }

    /// <summary>
    /// Check if a path matches any of the patterns.
    /// </summary>
    /// <param name="path">The path to check (Unix-style).</param>
    /// <returns>True if any pattern matches.</returns>
    public bool IsMatch(string path)
    {
        ArgumentNullException.ThrowIfNull(path);

        var normalizedPath = NormalizePath(path);
        return _globs.Any(g => g.IsMatch(normalizedPath));
    }

    /// <summary>
    /// Create a matcher for a single facet.
    /// </summary>
    /// <param name="facet">The facet to create a matcher for.</param>
    /// <returns>A GlobMatcher for the facet's selectors.</returns>
    public static GlobMatcher ForFacet(IFacet facet)
    {
        ArgumentNullException.ThrowIfNull(facet);
        return new GlobMatcher(facet.Selectors);
    }

    private static string NormalizePattern(string pattern)
    {
        // Ensure patterns use forward slashes
        return pattern.Replace('\\', '/');
    }

    private static string NormalizePath(string path)
    {
        // Ensure paths use forward slashes and are rooted
        var normalized = path.Replace('\\', '/');
        if (!normalized.StartsWith('/'))
        {
            normalized = "/" + normalized;
        }

        return normalized;
    }
}
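Sketch (reviewer note): matching behavior, including the Windows-path normalization performed by IsMatch.

var matcher = new GlobMatcher(new[] { "/var/lib/dpkg/**", "/etc/**" });

var a = matcher.IsMatch("/var/lib/dpkg/status");  // true
var b = matcher.IsMatch(@"etc\app\config.json");  // true: normalized to /etc/app/config.json
var c = matcher.IsMatch("/usr/bin/env");          // false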
32
src/__Libraries/StellaOps.Facet/ICryptoHash.cs
Normal file
@@ -0,0 +1,32 @@
|
||||
// <copyright file="ICryptoHash.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
namespace StellaOps.Facet;
|
||||
|
||||
/// <summary>
|
||||
/// Abstraction for cryptographic hash operations.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// This interface allows the facet library to be used with different
|
||||
/// cryptographic implementations (e.g., built-in .NET, BouncyCastle, HSM).
|
||||
/// </remarks>
|
||||
public interface ICryptoHash
|
||||
{
|
||||
/// <summary>
|
||||
/// Compute hash of the given data.
|
||||
/// </summary>
|
||||
/// <param name="data">Data to hash.</param>
|
||||
/// <param name="algorithm">Algorithm name (e.g., "SHA256", "SHA512").</param>
|
||||
/// <returns>Hash bytes.</returns>
|
||||
byte[] ComputeHash(byte[] data, string algorithm);
|
||||
|
||||
/// <summary>
|
||||
/// Compute hash of a stream.
|
||||
/// </summary>
|
||||
/// <param name="stream">Stream to hash.</param>
|
||||
/// <param name="algorithm">Algorithm name.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Hash bytes.</returns>
|
||||
Task<byte[]> ComputeHashAsync(Stream stream, string algorithm, CancellationToken ct = default);
|
||||
}
|
||||
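// --- Editor's sketch (not part of this commit): a minimal ICryptoHash backed by ---
// --- System.Security.Cryptography. The shipped implementation may differ (e.g., HSM-backed). ---
using System.Security.Cryptography;

public sealed class BuiltInCryptoHash : ICryptoHash
{
    public byte[] ComputeHash(byte[] data, string algorithm)
    {
        using var hasher = CreateAlgorithm(algorithm);
        return hasher.ComputeHash(data);
    }

    public async Task<byte[]> ComputeHashAsync(Stream stream, string algorithm, CancellationToken ct = default)
    {
        using var hasher = CreateAlgorithm(algorithm);
        return await hasher.ComputeHashAsync(stream, ct).ConfigureAwait(false);
    }

    private static HashAlgorithm CreateAlgorithm(string algorithm) => algorithm.ToUpperInvariant() switch
    {
        "SHA256" => SHA256.Create(),
        "SHA512" => SHA512.Create(),
        _ => throw new NotSupportedException($"Unsupported hash algorithm '{algorithm}'."),
    };
}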
60
src/__Libraries/StellaOps.Facet/IFacet.cs
Normal file
@@ -0,0 +1,60 @@
// <copyright file="IFacet.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Represents a trackable slice of an image.
/// </summary>
/// <remarks>
/// <para>
/// A facet defines a logical grouping of files within a container image
/// that can be tracked independently for sealing and drift detection.
/// </para>
/// <para>
/// Examples of facets: OS packages, language dependencies, binaries, config files.
/// </para>
/// </remarks>
public interface IFacet
{
    /// <summary>
    /// Gets the unique identifier for this facet type.
    /// </summary>
    /// <remarks>
    /// Format: "{category}-{specifics}" e.g., "os-packages-dpkg", "lang-deps-npm".
    /// </remarks>
    string FacetId { get; }

    /// <summary>
    /// Gets the human-readable name.
    /// </summary>
    string Name { get; }

    /// <summary>
    /// Gets the facet category for grouping.
    /// </summary>
    FacetCategory Category { get; }

    /// <summary>
    /// Gets the glob patterns or path selectors for files in this facet.
    /// </summary>
    /// <remarks>
    /// <para>Selectors support:</para>
    /// <list type="bullet">
    /// <item><description>Glob patterns: "**/*.json", "/usr/bin/*"</description></item>
    /// <item><description>Exact paths: "/var/lib/dpkg/status"</description></item>
    /// <item><description>Directory patterns: "/etc/**"</description></item>
    /// </list>
    /// </remarks>
    IReadOnlyList<string> Selectors { get; }

    /// <summary>
    /// Gets the priority for conflict resolution when files match multiple facets.
    /// </summary>
    /// <remarks>
    /// Lower values = higher priority. A file matching multiple facets
    /// will be assigned to the facet with the lowest priority value.
    /// </remarks>
    int Priority { get; }
}
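// --- Editor's sketch (not part of this commit): what a concrete facet could look like. ---
// The facet id and selector mirror the dpkg example used in the tests above; the
// priority value is illustrative.
public sealed record DpkgPackagesFacet : IFacet
{
    public string FacetId => "os-packages-dpkg";

    public string Name => "OS Packages (dpkg)";

    public FacetCategory Category => FacetCategory.OsPackages;

    public IReadOnlyList<string> Selectors { get; } = ["/var/lib/dpkg/**"];

    public int Priority => 10; // lower value wins when selectors overlap
}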
35
src/__Libraries/StellaOps.Facet/IFacetDriftDetector.cs
Normal file
@@ -0,0 +1,35 @@
// <copyright file="IFacetDriftDetector.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Detects drift between a baseline seal and current state.
/// </summary>
public interface IFacetDriftDetector
{
    /// <summary>
    /// Compare current extraction result against a baseline seal.
    /// </summary>
    /// <param name="baseline">The baseline facet seal.</param>
    /// <param name="current">The current extraction result.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Drift report with per-facet analysis.</returns>
    Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetExtractionResult current,
        CancellationToken ct = default);

    /// <summary>
    /// Compare two seals.
    /// </summary>
    /// <param name="baseline">The baseline seal.</param>
    /// <param name="current">The current seal.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Drift report with per-facet analysis.</returns>
    Task<FacetDriftReport> DetectDriftAsync(
        FacetSeal baseline,
        FacetSeal current,
        CancellationToken ct = default);
}
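// --- Editor's sketch (not part of this commit): a typical drift-detection flow. ---
// `sealStore`, `driftDetector`, `currentExtraction`, `imageDigest`, and `ct` are
// assumed to be wired up elsewhere; the names are illustrative.
FacetSeal? baseline = await sealStore.GetLatestSealAsync(imageDigest, ct);
if (baseline is not null)
{
    FacetDriftReport report = await driftDetector.DetectDriftAsync(baseline, currentExtraction, ct);
    // Inspect the per-facet analysis, then apply quota verdicts (see QuotaExceededAction below).
}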
329
src/__Libraries/StellaOps.Facet/IFacetDriftVexDraftStore.cs
Normal file
@@ -0,0 +1,329 @@
// <copyright file="IFacetDriftVexDraftStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_003_FACET (QTA-018)

using System.Collections.Concurrent;
using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Query parameters for listing VEX drafts.
/// </summary>
public sealed record FacetDriftVexDraftQuery
{
    /// <summary>
    /// Filter by image digest.
    /// </summary>
    public string? ImageDigest { get; init; }

    /// <summary>
    /// Filter by facet ID.
    /// </summary>
    public string? FacetId { get; init; }

    /// <summary>
    /// Filter by review status.
    /// </summary>
    public FacetDriftVexReviewStatus? ReviewStatus { get; init; }

    /// <summary>
    /// Include only drafts created since this time.
    /// </summary>
    public DateTimeOffset? Since { get; init; }

    /// <summary>
    /// Include only drafts created until this time.
    /// </summary>
    public DateTimeOffset? Until { get; init; }

    /// <summary>
    /// Maximum number of results to return.
    /// </summary>
    public int Limit { get; init; } = 100;

    /// <summary>
    /// Offset for pagination.
    /// </summary>
    public int Offset { get; init; } = 0;
}

/// <summary>
/// Review status for facet drift VEX drafts.
/// </summary>
public enum FacetDriftVexReviewStatus
{
    /// <summary>
    /// Draft is pending review.
    /// </summary>
    Pending,

    /// <summary>
    /// Draft has been approved.
    /// </summary>
    Approved,

    /// <summary>
    /// Draft has been rejected.
    /// </summary>
    Rejected,

    /// <summary>
    /// Draft has expired without review.
    /// </summary>
    Expired
}

/// <summary>
/// Storage abstraction for facet drift VEX drafts.
/// </summary>
public interface IFacetDriftVexDraftStore
{
    /// <summary>
    /// Saves a new draft. Throws if a draft with the same ID already exists.
    /// </summary>
    Task SaveAsync(FacetDriftVexDraft draft, CancellationToken ct = default);

    /// <summary>
    /// Saves multiple drafts atomically.
    /// </summary>
    Task SaveBatchAsync(IEnumerable<FacetDriftVexDraft> drafts, CancellationToken ct = default);

    /// <summary>
    /// Finds a draft by its unique ID.
    /// </summary>
    Task<FacetDriftVexDraft?> FindByIdAsync(string draftId, CancellationToken ct = default);

    /// <summary>
    /// Finds drafts matching the query parameters.
    /// </summary>
    Task<ImmutableArray<FacetDriftVexDraft>> QueryAsync(FacetDriftVexDraftQuery query, CancellationToken ct = default);

    /// <summary>
    /// Updates a draft's review status.
    /// </summary>
    Task UpdateReviewStatusAsync(
        string draftId,
        FacetDriftVexReviewStatus status,
        string? reviewedBy = null,
        string? reviewNotes = null,
        CancellationToken ct = default);

    /// <summary>
    /// Gets pending drafts that have passed their review deadline.
    /// </summary>
    Task<ImmutableArray<FacetDriftVexDraft>> GetOverdueAsync(DateTimeOffset asOf, CancellationToken ct = default);

    /// <summary>
    /// Deletes drafts whose expiry time has passed as of the given instant.
    /// </summary>
    Task<int> PurgeExpiredAsync(DateTimeOffset asOf, CancellationToken ct = default);

    /// <summary>
    /// Checks if a pending draft exists for the given image/facet combination.
    /// </summary>
    Task<bool> ExistsAsync(string imageDigest, string facetId, CancellationToken ct = default);
}

/// <summary>
/// Extended draft record with review tracking.
/// </summary>
public sealed record FacetDriftVexDraftWithReview
{
    /// <summary>
    /// The original draft.
    /// </summary>
    public required FacetDriftVexDraft Draft { get; init; }

    /// <summary>
    /// Current review status.
    /// </summary>
    public FacetDriftVexReviewStatus ReviewStatus { get; init; } = FacetDriftVexReviewStatus.Pending;

    /// <summary>
    /// Who reviewed the draft.
    /// </summary>
    public string? ReviewedBy { get; init; }

    /// <summary>
    /// When the draft was reviewed.
    /// </summary>
    public DateTimeOffset? ReviewedAt { get; init; }

    /// <summary>
    /// Notes from the reviewer.
    /// </summary>
    public string? ReviewNotes { get; init; }
}

/// <summary>
/// In-memory implementation of <see cref="IFacetDriftVexDraftStore"/> for testing.
/// </summary>
public sealed class InMemoryFacetDriftVexDraftStore : IFacetDriftVexDraftStore
{
    private readonly ConcurrentDictionary<string, FacetDriftVexDraftWithReview> _drafts = new(StringComparer.Ordinal);
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="InMemoryFacetDriftVexDraftStore"/> class.
    /// </summary>
    public InMemoryFacetDriftVexDraftStore(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public Task SaveAsync(FacetDriftVexDraft draft, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(draft);

        var wrapper = new FacetDriftVexDraftWithReview { Draft = draft };
        if (!_drafts.TryAdd(draft.DraftId, wrapper))
        {
            throw new InvalidOperationException($"Draft with ID '{draft.DraftId}' already exists.");
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task SaveBatchAsync(IEnumerable<FacetDriftVexDraft> drafts, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(drafts);

        // Check for conflicts before writing anything so the batch stays
        // all-or-nothing, as the interface contract promises.
        var materialized = drafts.ToList();
        foreach (var draft in materialized)
        {
            if (_drafts.ContainsKey(draft.DraftId))
            {
                throw new InvalidOperationException($"Draft with ID '{draft.DraftId}' already exists.");
            }
        }

        foreach (var draft in materialized)
        {
            _drafts[draft.DraftId] = new FacetDriftVexDraftWithReview { Draft = draft };
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<FacetDriftVexDraft?> FindByIdAsync(string draftId, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(draftId);

        _drafts.TryGetValue(draftId, out var wrapper);
        return Task.FromResult<FacetDriftVexDraft?>(wrapper?.Draft);
    }

    /// <inheritdoc />
    public Task<ImmutableArray<FacetDriftVexDraft>> QueryAsync(FacetDriftVexDraftQuery query, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(query);

        var results = _drafts.Values.AsEnumerable();

        if (!string.IsNullOrEmpty(query.ImageDigest))
        {
            results = results.Where(w => w.Draft.ImageDigest == query.ImageDigest);
        }

        if (!string.IsNullOrEmpty(query.FacetId))
        {
            results = results.Where(w => w.Draft.FacetId == query.FacetId);
        }

        if (query.ReviewStatus.HasValue)
        {
            results = results.Where(w => w.ReviewStatus == query.ReviewStatus.Value);
        }

        if (query.Since.HasValue)
        {
            results = results.Where(w => w.Draft.GeneratedAt >= query.Since.Value);
        }

        if (query.Until.HasValue)
        {
            results = results.Where(w => w.Draft.GeneratedAt <= query.Until.Value);
        }

        var paged = results
            .OrderByDescending(w => w.Draft.GeneratedAt)
            .Skip(query.Offset)
            .Take(query.Limit)
            .Select(w => w.Draft)
            .ToImmutableArray();

        return Task.FromResult(paged);
    }

    /// <inheritdoc />
    public Task UpdateReviewStatusAsync(
        string draftId,
        FacetDriftVexReviewStatus status,
        string? reviewedBy = null,
        string? reviewNotes = null,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(draftId);

        if (!_drafts.TryGetValue(draftId, out var wrapper))
        {
            throw new KeyNotFoundException($"Draft with ID '{draftId}' not found.");
        }

        var updated = wrapper with
        {
            ReviewStatus = status,
            ReviewedBy = reviewedBy,
            ReviewedAt = _timeProvider.GetUtcNow(),
            ReviewNotes = reviewNotes
        };

        _drafts[draftId] = updated;
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public Task<ImmutableArray<FacetDriftVexDraft>> GetOverdueAsync(DateTimeOffset asOf, CancellationToken ct = default)
    {
        var overdue = _drafts.Values
            .Where(w => w.ReviewStatus == FacetDriftVexReviewStatus.Pending)
            .Where(w => w.Draft.ReviewDeadline < asOf)
            .Select(w => w.Draft)
            .ToImmutableArray();

        return Task.FromResult(overdue);
    }

    /// <inheritdoc />
    public Task<int> PurgeExpiredAsync(DateTimeOffset asOf, CancellationToken ct = default)
    {
        var expiredIds = _drafts
            .Where(kvp => kvp.Value.Draft.ExpiresAt < asOf)
            .Select(kvp => kvp.Key)
            .ToList();

        foreach (var id in expiredIds)
        {
            _drafts.TryRemove(id, out _);
        }

        return Task.FromResult(expiredIds.Count);
    }

    /// <inheritdoc />
    public Task<bool> ExistsAsync(string imageDigest, string facetId, CancellationToken ct = default)
    {
        var exists = _drafts.Values.Any(w =>
            w.Draft.ImageDigest == imageDigest &&
            w.Draft.FacetId == facetId &&
            w.ReviewStatus == FacetDriftVexReviewStatus.Pending);

        return Task.FromResult(exists);
    }

    /// <summary>
    /// Gets all drafts for testing purposes.
    /// </summary>
    public IReadOnlyCollection<FacetDriftVexDraftWithReview> GetAllForTesting()
        => _drafts.Values.ToList();
}
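// --- Editor's sketch (not part of this commit): draft lifecycle against the in-memory store. ---
// Construction of `draft` is omitted; the FacetDriftVexDraft fields used here
// (DraftId, ImageDigest, FacetId) are assumed from their usage in this file.
var store = new InMemoryFacetDriftVexDraftStore();
await store.SaveAsync(draft);

var pending = await store.QueryAsync(new FacetDriftVexDraftQuery
{
    ImageDigest = draft.ImageDigest,
    ReviewStatus = FacetDriftVexReviewStatus.Pending
});

await store.UpdateReviewStatusAsync(
    draft.DraftId,
    FacetDriftVexReviewStatus.Approved,
    reviewedBy: "secops",
    reviewNotes: "benign config drift");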
47
src/__Libraries/StellaOps.Facet/IFacetExtractor.cs
Normal file
@@ -0,0 +1,47 @@
// <copyright file="IFacetExtractor.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Extracts facet information from container images.
/// </summary>
public interface IFacetExtractor
{
    /// <summary>
    /// Extract facets from a local directory (unpacked image).
    /// </summary>
    /// <param name="rootPath">Path to the unpacked image root.</param>
    /// <param name="options">Extraction options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromDirectoryAsync(
        string rootPath,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);

    /// <summary>
    /// Extract facets from a tar archive.
    /// </summary>
    /// <param name="tarStream">Stream containing the tar archive.</param>
    /// <param name="options">Extraction options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromTarAsync(
        Stream tarStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);

    /// <summary>
    /// Extract facets from an OCI image layer.
    /// </summary>
    /// <param name="layerStream">Stream containing the layer (tar.gz).</param>
    /// <param name="options">Extraction options.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Extraction result with all facet entries.</returns>
    Task<FacetExtractionResult> ExtractFromOciLayerAsync(
        Stream layerStream,
        FacetExtractionOptions? options = null,
        CancellationToken ct = default);
}
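// --- Editor's sketch (not part of this commit): extracting facets from an unpacked image. ---
// `extractor` is an IFacetExtractor implementation and `ct` a CancellationToken; the
// IncludeFileDetails option name comes from its usage earlier in this commit.
var result = await extractor.ExtractFromDirectoryAsync(
    "/tmp/unpacked-image",
    new FacetExtractionOptions { IncludeFileDetails = true },
    ct);
Console.WriteLine($"Combined Merkle root: {result.CombinedMerkleRoot}");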
109
src/__Libraries/StellaOps.Facet/IFacetSealStore.cs
Normal file
@@ -0,0 +1,109 @@
// <copyright file="IFacetSealStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// Persistent store for <see cref="FacetSeal"/> instances.
/// </summary>
/// <remarks>
/// <para>
/// Implementations provide storage and retrieval of facet seals for drift detection
/// and quota enforcement. Seals are indexed by image digest and creation time.
/// </para>
/// <para>
/// Sprint: SPRINT_20260105_002_003_FACET (QTA-012)
/// </para>
/// </remarks>
public interface IFacetSealStore
{
    /// <summary>
    /// Get the most recent seal for an image digest.
    /// </summary>
    /// <param name="imageDigest">The image digest (e.g., "sha256:{hex}").</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The latest seal, or null if no seal exists for this image.</returns>
    Task<FacetSeal?> GetLatestSealAsync(string imageDigest, CancellationToken ct = default);

    /// <summary>
    /// Get a seal by its combined Merkle root (unique identifier).
    /// </summary>
    /// <param name="combinedMerkleRoot">The seal's combined Merkle root.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The seal, or null if not found.</returns>
    Task<FacetSeal?> GetByCombinedRootAsync(string combinedMerkleRoot, CancellationToken ct = default);

    /// <summary>
    /// Get seal history for an image digest.
    /// </summary>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="limit">Maximum number of seals to return.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Seals in descending order by creation time (most recent first).</returns>
    Task<ImmutableArray<FacetSeal>> GetHistoryAsync(
        string imageDigest,
        int limit = 10,
        CancellationToken ct = default);

    /// <summary>
    /// Save a seal to the store.
    /// </summary>
    /// <param name="seal">The seal to save.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A task representing the async operation.</returns>
    /// <exception cref="ArgumentNullException">If seal is null.</exception>
    /// <exception cref="SealAlreadyExistsException">If a seal with the same combined root exists.</exception>
    Task SaveAsync(FacetSeal seal, CancellationToken ct = default);

    /// <summary>
    /// Check if a seal exists for an image digest.
    /// </summary>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>True if at least one seal exists.</returns>
    Task<bool> ExistsAsync(string imageDigest, CancellationToken ct = default);

    /// <summary>
    /// Delete all seals for an image digest.
    /// </summary>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Number of seals deleted.</returns>
    Task<int> DeleteByImageAsync(string imageDigest, CancellationToken ct = default);

    /// <summary>
    /// Purge seals older than the specified retention period.
    /// </summary>
    /// <param name="retentionPeriod">Retention period from creation time.</param>
    /// <param name="keepAtLeast">Minimum seals to keep per image digest.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Number of seals purged.</returns>
    Task<int> PurgeOldSealsAsync(
        TimeSpan retentionPeriod,
        int keepAtLeast = 1,
        CancellationToken ct = default);
}

/// <summary>
/// Exception thrown when attempting to save a duplicate seal.
/// </summary>
public sealed class SealAlreadyExistsException : Exception
{
    /// <summary>
    /// Initializes a new instance of the <see cref="SealAlreadyExistsException"/> class.
    /// </summary>
    /// <param name="combinedMerkleRoot">The duplicate seal's combined root.</param>
    public SealAlreadyExistsException(string combinedMerkleRoot)
        : base($"A seal with combined Merkle root '{combinedMerkleRoot}' already exists.")
    {
        CombinedMerkleRoot = combinedMerkleRoot;
    }

    /// <summary>
    /// Gets the duplicate seal's combined Merkle root.
    /// </summary>
    public string CombinedMerkleRoot { get; }
}
228
src/__Libraries/StellaOps.Facet/InMemoryFacetSealStore.cs
Normal file
@@ -0,0 +1,228 @@
// <copyright file="InMemoryFacetSealStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Concurrent;
using System.Collections.Immutable;

namespace StellaOps.Facet;

/// <summary>
/// In-memory implementation of <see cref="IFacetSealStore"/> for testing.
/// </summary>
/// <remarks>
/// <para>
/// Thread-safe but not persistent. Useful for unit tests and local development.
/// </para>
/// <para>
/// Sprint: SPRINT_20260105_002_003_FACET (QTA-012)
/// </para>
/// </remarks>
public sealed class InMemoryFacetSealStore : IFacetSealStore
{
    private readonly ConcurrentDictionary<string, FacetSeal> _sealsByRoot = new();
    private readonly ConcurrentDictionary<string, SortedSet<string>> _rootsByImage = new();
    private readonly object _lock = new();

    /// <inheritdoc/>
    public Task<FacetSeal?> GetLatestSealAsync(string imageDigest, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);

        if (!_rootsByImage.TryGetValue(imageDigest, out var roots) || roots.Count == 0)
        {
            return Task.FromResult<FacetSeal?>(null);
        }

        lock (_lock)
        {
            // Get the most recent seal (highest creation time)
            FacetSeal? latest = null;
            foreach (var root in roots)
            {
                if (_sealsByRoot.TryGetValue(root, out var seal))
                {
                    if (latest is null || seal.CreatedAt > latest.CreatedAt)
                    {
                        latest = seal;
                    }
                }
            }

            return Task.FromResult(latest);
        }
    }

    /// <inheritdoc/>
    public Task<FacetSeal?> GetByCombinedRootAsync(string combinedMerkleRoot, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentException.ThrowIfNullOrWhiteSpace(combinedMerkleRoot);

        _sealsByRoot.TryGetValue(combinedMerkleRoot, out var seal);
        return Task.FromResult(seal);
    }

    /// <inheritdoc/>
    public Task<ImmutableArray<FacetSeal>> GetHistoryAsync(
        string imageDigest,
        int limit = 10,
        CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
        ArgumentOutOfRangeException.ThrowIfNegativeOrZero(limit);

        if (!_rootsByImage.TryGetValue(imageDigest, out var roots) || roots.Count == 0)
        {
            return Task.FromResult(ImmutableArray<FacetSeal>.Empty);
        }

        lock (_lock)
        {
            var seals = roots
                .Select(r => _sealsByRoot.TryGetValue(r, out var s) ? s : null)
                .Where(s => s is not null)
                .Cast<FacetSeal>()
                .OrderByDescending(s => s.CreatedAt)
                .Take(limit)
                .ToImmutableArray();

            return Task.FromResult(seals);
        }
    }

    /// <inheritdoc/>
    public Task SaveAsync(FacetSeal seal, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentNullException.ThrowIfNull(seal);

        lock (_lock)
        {
            if (_sealsByRoot.ContainsKey(seal.CombinedMerkleRoot))
            {
                throw new SealAlreadyExistsException(seal.CombinedMerkleRoot);
            }

            _sealsByRoot[seal.CombinedMerkleRoot] = seal;

            var roots = _rootsByImage.GetOrAdd(seal.ImageDigest, _ => new SortedSet<string>());
            lock (roots)
            {
                roots.Add(seal.CombinedMerkleRoot);
            }
        }

        return Task.CompletedTask;
    }

    /// <inheritdoc/>
    public Task<bool> ExistsAsync(string imageDigest, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);

        if (_rootsByImage.TryGetValue(imageDigest, out var roots))
        {
            lock (roots)
            {
                return Task.FromResult(roots.Count > 0);
            }
        }

        return Task.FromResult(false);
    }

    /// <inheritdoc/>
    public Task<int> DeleteByImageAsync(string imageDigest, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);

        lock (_lock)
        {
            if (!_rootsByImage.TryRemove(imageDigest, out var roots))
            {
                return Task.FromResult(0);
            }

            int deleted = 0;
            foreach (var root in roots)
            {
                if (_sealsByRoot.TryRemove(root, out _))
                {
                    deleted++;
                }
            }

            return Task.FromResult(deleted);
        }
    }

    /// <inheritdoc/>
    public Task<int> PurgeOldSealsAsync(
        TimeSpan retentionPeriod,
        int keepAtLeast = 1,
        CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        ArgumentOutOfRangeException.ThrowIfNegativeOrZero(keepAtLeast);

        var cutoff = DateTimeOffset.UtcNow - retentionPeriod;
        int purged = 0;

        lock (_lock)
        {
            foreach (var (imageDigest, roots) in _rootsByImage)
            {
                // Get seals for this image, sorted by creation time descending
                var seals = roots
                    .Select(r => _sealsByRoot.TryGetValue(r, out var s) ? s : null)
                    .Where(s => s is not null)
                    .Cast<FacetSeal>()
                    .OrderByDescending(s => s.CreatedAt)
                    .ToList();

                // Skip keepAtLeast, then purge old ones
                var toPurge = seals
                    .Skip(keepAtLeast)
                    .Where(s => s.CreatedAt < cutoff)
                    .ToList();

                foreach (var seal in toPurge)
                {
                    if (_sealsByRoot.TryRemove(seal.CombinedMerkleRoot, out _))
                    {
                        lock (roots)
                        {
                            roots.Remove(seal.CombinedMerkleRoot);
                        }

                        purged++;
                    }
                }
            }
        }

        return Task.FromResult(purged);
    }

    /// <summary>
    /// Clear all seals from the store.
    /// </summary>
    public void Clear()
    {
        lock (_lock)
        {
            _sealsByRoot.Clear();
            _rootsByImage.Clear();
        }
    }

    /// <summary>
    /// Get the total number of seals in the store.
    /// </summary>
    public int Count => _sealsByRoot.Count;
}
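// --- Editor's sketch (not part of this commit): seal lifecycle against the in-memory store. ---
// Construction of `seal` is omitted; all calls below are the store APIs defined above.
var store = new InMemoryFacetSealStore();
await store.SaveAsync(seal); // throws SealAlreadyExistsException on a duplicate combined root

FacetSeal? latest = await store.GetLatestSealAsync(seal.ImageDigest);
var history = await store.GetHistoryAsync(seal.ImageDigest, limit: 5);
int purged = await store.PurgeOldSealsAsync(TimeSpan.FromDays(90), keepAtLeast: 1);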
52
src/__Libraries/StellaOps.Facet/QuotaExceededAction.cs
Normal file
@@ -0,0 +1,52 @@
// <copyright file="QuotaExceededAction.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.Facet;

/// <summary>
/// Action to take when a facet quota is exceeded.
/// </summary>
public enum QuotaExceededAction
{
    /// <summary>
    /// Emit a warning but allow the operation to continue.
    /// </summary>
    Warn,

    /// <summary>
    /// Block the operation (fail deployment/admission).
    /// </summary>
    Block,

    /// <summary>
    /// Require a VEX statement to authorize the drift.
    /// </summary>
    RequireVex
}

/// <summary>
/// Result of evaluating a facet's drift against its quota.
/// </summary>
public enum QuotaVerdict
{
    /// <summary>
    /// Drift is within acceptable limits.
    /// </summary>
    Ok,

    /// <summary>
    /// Drift exceeds threshold but action is Warn.
    /// </summary>
    Warning,

    /// <summary>
    /// Drift exceeds threshold and action is Block.
    /// </summary>
    Blocked,

    /// <summary>
    /// Drift requires VEX authorization.
    /// </summary>
    RequiresVex
}
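// --- Editor's sketch (not part of this commit): one plausible mapping from the ---
// --- configured action to a verdict once a facet's drift exceeds its quota. ---
static QuotaVerdict Evaluate(bool exceeded, QuotaExceededAction action)
{
    if (!exceeded)
    {
        return QuotaVerdict.Ok;
    }

    return action switch
    {
        QuotaExceededAction.Warn => QuotaVerdict.Warning,
        QuotaExceededAction.Block => QuotaVerdict.Blocked,
        QuotaExceededAction.RequireVex => QuotaVerdict.RequiresVex,
        _ => throw new ArgumentOutOfRangeException(nameof(action)),
    };
}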
@@ -0,0 +1,143 @@
// <copyright file="FacetSealJsonConverter.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Facet.Serialization;

/// <summary>
/// JSON serialization options for facet seals.
/// </summary>
public static class FacetJsonOptions
{
    /// <summary>
    /// Gets the default JSON serializer options for facet seals.
    /// </summary>
    public static JsonSerializerOptions Default { get; } = CreateOptions();

    /// <summary>
    /// Gets options for compact serialization (no indentation).
    /// </summary>
    public static JsonSerializerOptions Compact { get; } = CreateOptions(writeIndented: false);

    /// <summary>
    /// Gets options for pretty-printed serialization.
    /// </summary>
    public static JsonSerializerOptions Pretty { get; } = CreateOptions(writeIndented: true);

    private static JsonSerializerOptions CreateOptions(bool writeIndented = false)
    {
        var options = new JsonSerializerOptions
        {
            WriteIndented = writeIndented,
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            PropertyNameCaseInsensitive = true
        };

        options.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase));
        options.Converters.Add(new ImmutableArrayConverterFactory());
        options.Converters.Add(new ImmutableDictionaryConverterFactory());

        return options;
    }
}

/// <summary>
/// Converter factory for ImmutableArray{T}.
/// </summary>
internal sealed class ImmutableArrayConverterFactory : JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert)
    {
        return typeToConvert.IsGenericType &&
               typeToConvert.GetGenericTypeDefinition() == typeof(ImmutableArray<>);
    }

    public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        var elementType = typeToConvert.GetGenericArguments()[0];
        var converterType = typeof(ImmutableArrayConverter<>).MakeGenericType(elementType);
        return (JsonConverter)Activator.CreateInstance(converterType)!;
    }
}

/// <summary>
/// Converter for ImmutableArray{T}.
/// </summary>
internal sealed class ImmutableArrayConverter<T> : JsonConverter<ImmutableArray<T>>
{
    public override ImmutableArray<T> Read(
        ref Utf8JsonReader reader,
        Type typeToConvert,
        JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return [];
        }

        var list = JsonSerializer.Deserialize<List<T>>(ref reader, options);
        return list is null ? [] : [.. list];
    }

    public override void Write(
        Utf8JsonWriter writer,
        ImmutableArray<T> value,
        JsonSerializerOptions options)
    {
        JsonSerializer.Serialize(writer, value.AsEnumerable(), options);
    }
}

/// <summary>
/// Converter factory for ImmutableDictionary{TKey,TValue}.
/// </summary>
internal sealed class ImmutableDictionaryConverterFactory : JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert)
    {
        return typeToConvert.IsGenericType &&
               typeToConvert.GetGenericTypeDefinition() == typeof(ImmutableDictionary<,>);
    }

    public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        var keyType = typeToConvert.GetGenericArguments()[0];
        var valueType = typeToConvert.GetGenericArguments()[1];
        var converterType = typeof(ImmutableDictionaryConverter<,>).MakeGenericType(keyType, valueType);
        return (JsonConverter)Activator.CreateInstance(converterType)!;
    }
}

/// <summary>
/// Converter for ImmutableDictionary{TKey,TValue}.
/// </summary>
internal sealed class ImmutableDictionaryConverter<TKey, TValue> : JsonConverter<ImmutableDictionary<TKey, TValue>>
    where TKey : notnull
{
    public override ImmutableDictionary<TKey, TValue>? Read(
        ref Utf8JsonReader reader,
        Type typeToConvert,
        JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return null;
        }

        var dict = JsonSerializer.Deserialize<Dictionary<TKey, TValue>>(ref reader, options);
        return dict?.ToImmutableDictionary();
    }

    public override void Write(
        Utf8JsonWriter writer,
        ImmutableDictionary<TKey, TValue> value,
        JsonSerializerOptions options)
    {
        JsonSerializer.Serialize(writer, value.AsEnumerable().ToDictionary(kv => kv.Key, kv => kv.Value), options);
    }
}
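// --- Editor's sketch (not part of this commit): round-tripping a seal with the options above. ---
// Construction of `seal` is omitted.
string json = JsonSerializer.Serialize(seal, FacetJsonOptions.Pretty);
FacetSeal? restored = JsonSerializer.Deserialize<FacetSeal>(json, FacetJsonOptions.Default);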
18
src/__Libraries/StellaOps.Facet/StellaOps.Facet.csproj
Normal file
@@ -0,0 +1,18 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <Description>Facet abstraction layer for per-facet sealing and drift tracking in container images.</Description>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <PackageReference Include="DotNet.Glob" />
  </ItemGroup>

</Project>
@@ -0,0 +1,91 @@
// <copyright file="ConcurrentHlcBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;

namespace StellaOps.HybridLogicalClock.Benchmarks;

/// <summary>
/// Benchmarks for concurrent HLC operations.
/// Measures thread contention and scalability under parallel access.
/// </summary>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Monitoring, iterationCount: 5)]
public class ConcurrentHlcBenchmarks
{
    private HybridLogicalClock _clock = null!;
    private InMemoryHlcStateStore _stateStore = null!;
    private FakeTimeProvider _timeProvider = null!;

    [Params(1, 2, 4, 8)]
    public int ThreadCount { get; set; }

    [GlobalSetup]
    public void Setup()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _stateStore = new InMemoryHlcStateStore();
        _clock = new HybridLogicalClock(
            _timeProvider,
            "concurrent-benchmark-node",
            _stateStore,
            NullLogger<HybridLogicalClock>.Instance);

        // Initialize the clock
        _ = _clock.Tick();
    }

    /// <summary>
    /// Benchmark concurrent tick operations.
    /// Each thread generates 1000 ticks; measures total throughput and contention.
    /// </summary>
    [Benchmark]
    public void ConcurrentTicks_1000PerThread()
    {
        const int ticksPerThread = 1000;

        Parallel.For(0, ThreadCount, threadIndex =>
        {
            for (int i = 0; i < ticksPerThread; i++)
            {
                _clock.Tick();
            }
        });
    }

    /// <summary>
    /// Benchmark mixed concurrent operations (ticks and receives).
    /// Simulates a real-world distributed scenario.
    /// </summary>
    [Benchmark]
    public void ConcurrentMixed_TicksAndReceives()
    {
        const int operationsPerThread = 500;

        Parallel.For(0, ThreadCount, threadId =>
        {
            for (int i = 0; i < operationsPerThread; i++)
            {
                if (i % 3 == 0)
                {
                    // Every third operation is a receive
                    var remote = new HlcTimestamp
                    {
                        PhysicalTime = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds(),
                        NodeId = $"remote-node-{threadId}",
                        LogicalCounter = i
                    };
                    _clock.Receive(remote);
                }
                else
                {
                    _clock.Tick();
                }
            }
        });
    }
}
@@ -0,0 +1,106 @@
// <copyright file="HlcBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;

namespace StellaOps.HybridLogicalClock.Benchmarks;

/// <summary>
/// Benchmarks for Hybrid Logical Clock operations.
/// HLC-010: Measures tick throughput and memory allocation.
///
/// To run: dotnet run -c Release
/// </summary>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Throughput, iterationCount: 10)]
public class HlcBenchmarks
{
    private HybridLogicalClock _clock = null!;
    private InMemoryHlcStateStore _stateStore = null!;
    private FakeTimeProvider _timeProvider = null!;
    private HlcTimestamp _remoteTimestamp;

    [GlobalSetup]
    public void Setup()
    {
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _stateStore = new InMemoryHlcStateStore();
        _clock = new HybridLogicalClock(
            _timeProvider,
            "benchmark-node-1",
            _stateStore,
            NullLogger<HybridLogicalClock>.Instance);

        // Pre-initialize the clock
        _ = _clock.Tick();

        // Create a remote timestamp for Receive benchmarks
        _remoteTimestamp = new HlcTimestamp
        {
            PhysicalTime = _timeProvider.GetUtcNow().ToUnixTimeMilliseconds(),
            NodeId = "remote-node-1",
            LogicalCounter = 5
        };
    }

    /// <summary>
    /// Benchmark single Tick operation throughput.
    /// Measures the raw performance of generating a new HLC timestamp.
    /// </summary>
    [Benchmark(Baseline = true)]
    public HlcTimestamp Tick()
    {
        return _clock.Tick();
    }

    /// <summary>
    /// Benchmark Tick with time advancement.
    /// Simulates real-world usage where physical time advances between ticks.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Tick_WithTimeAdvance()
    {
        _timeProvider.Advance(TimeSpan.FromMilliseconds(1));
        return _clock.Tick();
    }

    /// <summary>
    /// Benchmark Receive operation.
    /// Measures performance of merging a remote timestamp.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Receive()
    {
        return _clock.Receive(_remoteTimestamp);
    }

    /// <summary>
    /// Benchmark batch of 100 ticks.
    /// Simulates high-throughput job scheduling scenarios.
    /// </summary>
    [Benchmark(OperationsPerInvoke = 100)]
    public void Tick_Batch100()
    {
        for (int i = 0; i < 100; i++)
        {
            _ = _clock.Tick();
        }
    }

    /// <summary>
    /// Benchmark batch of 1000 ticks.
    /// Stress test for very high throughput scenarios.
    /// </summary>
    [Benchmark(OperationsPerInvoke = 1000)]
    public void Tick_Batch1000()
    {
        for (int i = 0; i < 1000; i++)
        {
            _ = _clock.Tick();
        }
    }
}
@@ -0,0 +1,131 @@
// <copyright file="HlcTimestampBenchmarks.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text.Json;
using BenchmarkDotNet.Attributes;
using BenchmarkDotNet.Engines;

namespace StellaOps.HybridLogicalClock.Benchmarks;

/// <summary>
/// Benchmarks for HlcTimestamp operations.
/// Measures parsing, serialization, and comparison performance.
/// </summary>
[MemoryDiagnoser]
[SimpleJob(RunStrategy.Throughput, iterationCount: 10)]
public class HlcTimestampBenchmarks
{
    private static readonly JsonSerializerOptions JsonOptions = new();

    private HlcTimestamp _timestamp;
    private string _sortableString = null!;
    private string _jsonString = null!;
    private HlcTimestamp[] _timestamps = null!;

    [GlobalSetup]
    public void Setup()
    {
        _timestamp = new HlcTimestamp
        {
            PhysicalTime = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds(),
            NodeId = "scheduler-east-1",
            LogicalCounter = 42
        };

        _sortableString = _timestamp.ToSortableString();
        _jsonString = JsonSerializer.Serialize(_timestamp, JsonOptions);

        // Generate array of timestamps for sorting benchmark
        _timestamps = new HlcTimestamp[1000];
        var random = new Random(42);
        for (int i = 0; i < _timestamps.Length; i++)
        {
            _timestamps[i] = new HlcTimestamp
            {
                PhysicalTime = DateTimeOffset.UtcNow.ToUnixTimeMilliseconds() + random.Next(-1000, 1000),
                NodeId = $"node-{random.Next(1, 10)}",
                LogicalCounter = random.Next(0, 1000)
            };
        }
    }

    /// <summary>
    /// Benchmark ToSortableString serialization.
    /// </summary>
    [Benchmark]
    public string ToSortableString()
    {
        return _timestamp.ToSortableString();
    }

    /// <summary>
    /// Benchmark Parse from sortable string.
    /// </summary>
    [Benchmark]
    public HlcTimestamp Parse()
    {
        return HlcTimestamp.Parse(_sortableString);
    }

    /// <summary>
    /// Benchmark TryParse from sortable string.
    /// </summary>
    [Benchmark]
    public bool TryParse()
    {
        return HlcTimestamp.TryParse(_sortableString, out _);
    }

    /// <summary>
    /// Benchmark full round-trip: serialize then parse.
    /// </summary>
    [Benchmark]
    public HlcTimestamp RoundTrip()
    {
        var str = _timestamp.ToSortableString();
        return HlcTimestamp.Parse(str);
    }

    /// <summary>
    /// Benchmark JSON serialization.
    /// </summary>
    [Benchmark]
    public string JsonSerialize()
    {
        return JsonSerializer.Serialize(_timestamp, JsonOptions);
    }

    /// <summary>
    /// Benchmark JSON deserialization.
    /// </summary>
    [Benchmark]
    public HlcTimestamp JsonDeserialize()
    {
        return JsonSerializer.Deserialize<HlcTimestamp>(_jsonString, JsonOptions);
    }

    /// <summary>
    /// Benchmark CompareTo operation.
    /// </summary>
    [Benchmark]
    public int CompareTo()
    {
        var other = new HlcTimestamp
        {
            PhysicalTime = _timestamp.PhysicalTime + 1,
            NodeId = _timestamp.NodeId,
            LogicalCounter = 0
        };
        return _timestamp.CompareTo(other);
    }

    /// <summary>
    /// Benchmark sorting 1000 timestamps.
    /// </summary>
    [Benchmark]
    public void Sort1000Timestamps()
    {
        var copy = (HlcTimestamp[])_timestamps.Clone();
        Array.Sort(copy);
    }
}
@@ -0,0 +1,31 @@
// <copyright file="Program.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using BenchmarkDotNet.Configs;
using BenchmarkDotNet.Running;

namespace StellaOps.HybridLogicalClock.Benchmarks;

/// <summary>
/// Entry point for HLC benchmarks.
/// </summary>
public static class Program
{
    /// <summary>
    /// Run benchmarks.
    /// Usage:
    ///   dotnet run -c Release                    # Run all benchmarks
    ///   dotnet run -c Release --filter "Tick"    # Run only Tick benchmarks
    ///   dotnet run -c Release --list flat        # List available benchmarks
    /// </summary>
    public static void Main(string[] args)
    {
        var config = DefaultConfig.Instance
            .WithOptions(ConfigOptions.DisableOptimizationsValidator);

        BenchmarkSwitcher
            .FromAssembly(typeof(Program).Assembly)
            .Run(args, config);
    }
}
@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="BenchmarkDotNet" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
  </ItemGroup>

</Project>
@@ -0,0 +1,142 @@
// <copyright file="HlcTimestampJsonConverterTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text.Json;
using FluentAssertions;

namespace StellaOps.HybridLogicalClock.Tests;

/// <summary>
/// Unit tests for <see cref="HlcTimestampJsonConverter"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class HlcTimestampJsonConverterTests
{
    private readonly JsonSerializerOptions _options = new()
    {
        Converters = { new HlcTimestampJsonConverter() }
    };

    [Fact]
    public void Serialize_ProducesSortableString()
    {
        // Arrange
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "node1",
            LogicalCounter = 42
        };

        // Act
        var json = JsonSerializer.Serialize(timestamp, _options);

        // Assert
        json.Should().Be("\"1704067200000-node1-000042\"");
    }

    [Fact]
    public void Deserialize_ParsesSortableString()
    {
        // Arrange
        var json = "\"1704067200000-node1-000042\"";

        // Act
        var result = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        result.PhysicalTime.Should().Be(1704067200000);
        result.NodeId.Should().Be("node1");
        result.LogicalCounter.Should().Be(42);
    }

    [Fact]
    public void RoundTrip_PreservesValues()
    {
        // Arrange
        var original = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "scheduler-east-1",
            LogicalCounter = 999
        };

        // Act
        var json = JsonSerializer.Serialize(original, _options);
        var deserialized = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        deserialized.Should().Be(original);
    }

    [Fact]
    public void Deserialize_Null_ReturnsZero()
    {
        // Arrange
        var json = "null";

        // Act
        var result = JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        result.Should().Be(default(HlcTimestamp));
    }

    [Fact]
    public void Deserialize_InvalidFormat_ThrowsJsonException()
    {
        // Arrange
        var json = "\"invalid\"";

        // Act
        var act = () => JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        act.Should().Throw<JsonException>();
    }

    [Fact]
    public void Deserialize_WrongTokenType_ThrowsJsonException()
    {
        // Arrange
        var json = "12345"; // number, not string

        // Act
        var act = () => JsonSerializer.Deserialize<HlcTimestamp>(json, _options);

        // Assert
        act.Should().Throw<JsonException>();
    }

    [Fact]
    public void SerializeInObject_WorksCorrectly()
    {
        // Arrange
        var obj = new TestWrapper
        {
            Timestamp = new HlcTimestamp
            {
                PhysicalTime = 1704067200000,
                NodeId = "node1",
                LogicalCounter = 1
            },
            Name = "Test"
        };

        // Act
        var json = JsonSerializer.Serialize(obj, _options);
        var deserialized = JsonSerializer.Deserialize<TestWrapper>(json, _options);

        // Assert
        deserialized.Should().NotBeNull();
        deserialized!.Timestamp.Should().Be(obj.Timestamp);
        deserialized.Name.Should().Be(obj.Name);
    }

    private sealed class TestWrapper
    {
        public HlcTimestamp Timestamp { get; set; }

        public string? Name { get; set; }
    }
}
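// --- Editor's sketch (not part of this commit): the converter shape these tests imply. ---
// HlcTimestamp is serialized as its sortable string ("{physicalMs}-{nodeId}-{counter:D6}").
// The actual HlcTimestampJsonConverter ships elsewhere in this commit and may differ;
// HandleNull is required so a JSON null maps to default(HlcTimestamp) as the tests expect.
public sealed class HlcTimestampJsonConverterSketch : JsonConverter<HlcTimestamp>
{
    public override bool HandleNull => true;

    public override HlcTimestamp Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType == JsonTokenType.Null)
        {
            return default;
        }

        if (reader.TokenType != JsonTokenType.String)
        {
            throw new JsonException($"Expected a string token for HlcTimestamp, got {reader.TokenType}.");
        }

        var text = reader.GetString();
        if (!HlcTimestamp.TryParse(text, out var timestamp))
        {
            throw new JsonException($"Invalid HLC timestamp: '{text}'.");
        }

        return timestamp;
    }

    public override void Write(Utf8JsonWriter writer, HlcTimestamp value, JsonSerializerOptions options)
        => writer.WriteStringValue(value.ToSortableString());
}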
@@ -0,0 +1,349 @@
// <copyright file="HlcTimestampTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using FluentAssertions;

namespace StellaOps.HybridLogicalClock.Tests;

/// <summary>
/// Unit tests for <see cref="HlcTimestamp"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class HlcTimestampTests
{
    [Fact]
    public void ToSortableString_FormatsCorrectly()
    {
        // Arrange
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1704067200000, // 2024-01-01 00:00:00 UTC
            NodeId = "scheduler-east-1",
            LogicalCounter = 42
        };

        // Act
        var result = timestamp.ToSortableString();

        // Assert
        result.Should().Be("1704067200000-scheduler-east-1-000042");
    }

    [Fact]
    public void Parse_RoundTrip_PreservesValues()
    {
        // Arrange
        var original = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "scheduler-east-1",
            LogicalCounter = 42
        };

        // Act
        var serialized = original.ToSortableString();
        var parsed = HlcTimestamp.Parse(serialized);

        // Assert
        parsed.Should().Be(original);
        parsed.PhysicalTime.Should().Be(original.PhysicalTime);
        parsed.NodeId.Should().Be(original.NodeId);
        parsed.LogicalCounter.Should().Be(original.LogicalCounter);
    }

    [Fact]
    public void Parse_WithHyphensInNodeId_ParsesCorrectly()
    {
        // Arrange - NodeId contains multiple hyphens
        var original = new HlcTimestamp
        {
            PhysicalTime = 1704067200000,
            NodeId = "scheduler-east-1-prod",
            LogicalCounter = 123
        };

        // Act
        var serialized = original.ToSortableString();
        var parsed = HlcTimestamp.Parse(serialized);

        // Assert
        parsed.NodeId.Should().Be("scheduler-east-1-prod");
    }

    [Fact]
    public void TryParse_ValidString_ReturnsTrue()
    {
        // Act
        var result = HlcTimestamp.TryParse("1704067200000-node1-000001", out var timestamp);

        // Assert
        result.Should().BeTrue();
        timestamp.PhysicalTime.Should().Be(1704067200000);
        timestamp.NodeId.Should().Be("node1");
        timestamp.LogicalCounter.Should().Be(1);
    }

    [Theory]
    [InlineData(null)]
    [InlineData("")]
    [InlineData("invalid")]
    [InlineData("abc-node-001")]
    [InlineData("1234567890123--000001")]
    [InlineData("1234567890123-node-abc")]
    public void TryParse_InvalidString_ReturnsFalse(string? input)
    {
        // Act
        var result = HlcTimestamp.TryParse(input, out _);

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void Parse_InvalidString_ThrowsFormatException()
    {
        // Act
        var act = () => HlcTimestamp.Parse("invalid");

        // Assert
        act.Should().Throw<FormatException>();
    }

    [Fact]
    public void Parse_Null_ThrowsArgumentNullException()
    {
        // Act
        var act = () => HlcTimestamp.Parse(null!);

        // Assert
        act.Should().Throw<ArgumentNullException>();
    }

    [Fact]
    public void CompareTo_SamePhysicalTime_HigherCounterIsGreater()
    {
        // Arrange
        var earlier = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var later = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 2
        };

        // Act & Assert
        earlier.CompareTo(later).Should().BeLessThan(0);
        later.CompareTo(earlier).Should().BeGreaterThan(0);
        (earlier < later).Should().BeTrue();
        (later > earlier).Should().BeTrue();
    }

    [Fact]
    public void CompareTo_DifferentPhysicalTime_HigherTimeIsGreater()
    {
        // Arrange
        var earlier = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 999
        };
        var later = new HlcTimestamp
        {
            PhysicalTime = 1001,
            NodeId = "node1",
            LogicalCounter = 0
        };

        // Act & Assert
        earlier.CompareTo(later).Should().BeLessThan(0);
        later.CompareTo(earlier).Should().BeGreaterThan(0);
    }

    [Fact]
    public void CompareTo_SameTimeAndCounter_NodeIdBreaksTie()
    {
        // Arrange
        var a = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "aaa",
            LogicalCounter = 1
        };
        var b = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "bbb",
            LogicalCounter = 1
        };

        // Act & Assert
        a.CompareTo(b).Should().BeLessThan(0);
        b.CompareTo(a).Should().BeGreaterThan(0);
    }

    [Fact]
    public void CompareTo_Equal_ReturnsZero()
    {
        // Arrange
        var a = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var b = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };

        // Act & Assert
        a.CompareTo(b).Should().Be(0);
        (a <= b).Should().BeTrue();
        (a >= b).Should().BeTrue();
    }

    [Fact]
    public void Default_HasExpectedValues()
    {
        // Act
        var zero = default(HlcTimestamp);

        // Assert
        zero.PhysicalTime.Should().Be(0);
        zero.NodeId.Should().BeNull();
        zero.LogicalCounter.Should().Be(0);
    }

    [Fact]
    public void ToDateTimeOffset_ConvertsCorrectly()
    {
        // Arrange
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1704067200000, // 2024-01-01 00:00:00 UTC
            NodeId = "node1",
            LogicalCounter = 0
        };

        // Act
        var dateTime = timestamp.ToDateTimeOffset();

        // Assert
        dateTime.Should().Be(new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero));
    }

    [Fact]
    public void Equality_SameValues_AreEqual()
    {
        // Arrange
        var a = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var b = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };

        // Assert
        a.Should().Be(b);
        (a == b).Should().BeTrue();
        a.GetHashCode().Should().Be(b.GetHashCode());
    }

    [Fact]
    public void Equality_DifferentValues_AreNotEqual()
    {
        // Arrange
        var a = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var b = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 2
        };

        // Assert
        a.Should().NotBe(b);
        (a != b).Should().BeTrue();
    }

    [Fact]
    public void ToString_ReturnsSortableString()
    {
        // Arrange
        var timestamp = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = 1704067200000,
|
||||
NodeId = "node1",
|
||||
LogicalCounter = 42
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = timestamp.ToString();
|
||||
|
||||
// Assert
|
||||
result.Should().Be(timestamp.ToSortableString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CompareTo_HigherCounter_ReturnsNegative()
|
||||
{
|
||||
// Arrange
|
||||
var a = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = 1000,
|
||||
NodeId = "node1",
|
||||
LogicalCounter = 1
|
||||
};
|
||||
var b = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = 1000,
|
||||
NodeId = "node1",
|
||||
LogicalCounter = 2
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = a.CompareTo(b);
|
||||
|
||||
// Assert
|
||||
result.Should().BeLessThan(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CompareTo_DefaultTimestamp_ReturnsPositiveForNonDefault()
|
||||
{
|
||||
// Arrange
|
||||
var timestamp = new HlcTimestamp
|
||||
{
|
||||
PhysicalTime = 1000,
|
||||
NodeId = "node1",
|
||||
LogicalCounter = 1
|
||||
};
|
||||
var defaultTimestamp = default(HlcTimestamp);
|
||||
|
||||
// Act
|
||||
var result = timestamp.CompareTo(defaultTimestamp);
|
||||
|
||||
// Assert
|
||||
result.Should().BeGreaterThan(0);
|
||||
}
|
||||
}
|
||||
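The fixed-width, zero-padded counter in ToSortableString is what makes same-millisecond ticks order correctly as plain strings, which several assertions above rely on. A minimal illustration using only ordinal string comparison (the literal values are placeholders):

// Same millisecond, counters 41 vs 42: ordinal comparison agrees with CompareTo.
var earlierSorts = string.CompareOrdinal(
    "1704067200000-node1-000041",
    "1704067200000-node1-000042") < 0; // true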
@@ -0,0 +1,314 @@
// <copyright file="HybridLogicalClockTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using Xunit;

namespace StellaOps.HybridLogicalClock.Tests;

/// <summary>
/// Unit tests for <see cref="HybridLogicalClock"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class HybridLogicalClockTests
{
    private const string TestNodeId = "test-node-1";
    private static readonly ILogger<HybridLogicalClock> NullLogger = NullLogger<HybridLogicalClock>.Instance;

    [Fact]
    public void Tick_Monotonic_SuccessiveTicksAlwaysIncrease()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Act
        var timestamps = Enumerable.Range(0, 100)
            .Select(_ => clock.Tick())
            .ToList();

        // Assert
        for (var i = 1; i < timestamps.Count; i++)
        {
            timestamps[i].Should().BeGreaterThan(timestamps[i - 1],
                $"Timestamp {i} should be greater than timestamp {i - 1}");
        }
    }

    [Fact]
    public void Tick_SamePhysicalTime_IncrementsCounter()
    {
        // Arrange
        var fixedTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(fixedTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Act
        var first = clock.Tick();
        var second = clock.Tick();
        var third = clock.Tick();

        // Assert
        first.LogicalCounter.Should().Be(0);
        second.LogicalCounter.Should().Be(1);
        third.LogicalCounter.Should().Be(2);

        // All should have the same physical time
        first.PhysicalTime.Should().Be(second.PhysicalTime);
        second.PhysicalTime.Should().Be(third.PhysicalTime);
    }

    [Fact]
    public void Tick_NewPhysicalTime_ResetsCounter()
    {
        // Arrange
        var startTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(startTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Act - generate some ticks
        clock.Tick();
        clock.Tick();
        var beforeAdvance = clock.Tick();

        // Advance time
        timeProvider.Advance(TimeSpan.FromMilliseconds(1));
        var afterAdvance = clock.Tick();

        // Assert
        beforeAdvance.LogicalCounter.Should().Be(2);
        afterAdvance.LogicalCounter.Should().Be(0);
        afterAdvance.PhysicalTime.Should().BeGreaterThan(beforeAdvance.PhysicalTime);
    }

    [Fact]
    public void Tick_NodeId_IsCorrectlySet()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, "my-custom-node", stateStore, NullLogger);

        // Act
        var timestamp = clock.Tick();

        // Assert
        timestamp.NodeId.Should().Be("my-custom-node");
        clock.NodeId.Should().Be("my-custom-node");
    }

    [Fact]
    public void Receive_RemoteTimestampAhead_MergesCorrectly()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Local tick first
        var localTick = clock.Tick();

        // Remote timestamp is 100ms ahead
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMilliseconds(100).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 5
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(remote.PhysicalTime);
        result.LogicalCounter.Should().Be(6); // remote counter + 1
        result.NodeId.Should().Be(TestNodeId);
    }

    [Fact]
    public void Receive_LocalTimestampAhead_MergesCorrectly()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Generate several local ticks to advance the counter
        clock.Tick();
        clock.Tick();
        var localState = clock.Tick();

        // Remote timestamp is behind
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMilliseconds(-100).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 0
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(localState.PhysicalTime);
        result.LogicalCounter.Should().Be(localState.LogicalCounter + 1);
    }

    [Fact]
    public void Receive_SamePhysicalTime_MergesCounters()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Local ticks
        clock.Tick();
        clock.Tick();
        var localState = clock.Current; // counter = 1

        // Remote timestamp with the same physical time but a higher counter
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 10
        };

        // Act
        var result = clock.Receive(remote);

        // Assert
        result.PhysicalTime.Should().Be(localTime.ToUnixTimeMilliseconds());
        result.LogicalCounter.Should().Be(11); // max(local, remote) + 1
    }

    [Fact]
    public void Receive_ClockSkewExceeded_ThrowsException()
    {
        // Arrange
        var localTime = new DateTimeOffset(2024, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var timeProvider = new FakeTimeProvider(localTime);
        var stateStore = new InMemoryHlcStateStore();
        var maxSkew = TimeSpan.FromMinutes(1);
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger, maxSkew);

        // Remote timestamp is 2 minutes ahead (exceeds the 1 minute tolerance)
        var remote = new HlcTimestamp
        {
            PhysicalTime = localTime.AddMinutes(2).ToUnixTimeMilliseconds(),
            NodeId = "remote-node",
            LogicalCounter = 0
        };

        // Act
        var act = () => clock.Receive(remote);

        // Assert
        act.Should().Throw<HlcClockSkewException>()
            .Where(e => e.MaxAllowedSkew == maxSkew)
            .Where(e => e.ActualSkew > maxSkew);
    }

    [Fact]
    public void Current_ReturnsLatestState()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Act
        var tick1 = clock.Tick();
        var current1 = clock.Current;

        var tick2 = clock.Tick();
        var current2 = clock.Current;

        // Assert
        current1.Should().Be(tick1);
        current2.Should().Be(tick2);
    }

    [Fact]
    public void Tick_PersistsStateToStore()
    {
        // Arrange
        var timeProvider = new FakeTimeProvider();
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock(timeProvider, TestNodeId, stateStore, NullLogger);

        // Act
        clock.Tick();

        // Assert - state should be persisted after tick
        stateStore.GetAllStates().Count.Should().Be(1);
    }

    [Fact]
    public void Constructor_NullTimeProvider_ThrowsArgumentNullException()
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(null!, TestNodeId, new InMemoryHlcStateStore(), NullLogger);

        // Assert
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("timeProvider");
    }

    [Theory]
    [InlineData(null)]
    [InlineData("")]
    [InlineData(" ")]
    public void Constructor_InvalidNodeId_ThrowsArgumentException(string? nodeId)
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(
            new FakeTimeProvider(),
            nodeId!,
            new InMemoryHlcStateStore(),
            NullLogger);

        // Assert
        act.Should().Throw<ArgumentException>();
    }

    [Fact]
    public void Constructor_NullStateStore_ThrowsArgumentNullException()
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(
            new FakeTimeProvider(),
            TestNodeId,
            null!,
            NullLogger);

        // Assert
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("stateStore");
    }

    [Fact]
    public void Constructor_NullLogger_ThrowsArgumentNullException()
    {
        // Arrange & Act
        var act = () => new HybridLogicalClock(
            new FakeTimeProvider(),
            TestNodeId,
            new InMemoryHlcStateStore(),
            null!);

        // Assert
        act.Should().Throw<ArgumentNullException>()
            .WithParameterName("logger");
    }
}
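Read together, the Tick and Receive tests above pin down the standard hybrid-logical-clock update rules. A compact sketch distilled from those assertions (an illustration of the rules the tests encode, not the library's actual implementation):

// tick():          pt' = max(now, pt);  c' = (pt' == pt) ? c + 1 : 0
// receive(remote): pt' = max(now, local.pt, remote.pt), counter merged as below
static (long Pt, long C) Merge(long now, (long Pt, long C) local, (long Pt, long C) remote)
{
    var pt = Math.Max(now, Math.Max(local.Pt, remote.Pt));
    if (pt == local.Pt && pt == remote.Pt)
    {
        return (pt, Math.Max(local.C, remote.C) + 1); // Receive_SamePhysicalTime_MergesCounters
    }
    if (pt == local.Pt)
    {
        return (pt, local.C + 1); // Receive_LocalTimestampAhead_MergesCorrectly
    }
    if (pt == remote.Pt)
    {
        return (pt, remote.C + 1); // Receive_RemoteTimestampAhead_MergesCorrectly
    }
    return (pt, 0); // wall clock alone is ahead: counter resets
}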
@@ -0,0 +1,168 @@
// <copyright file="InMemoryHlcStateStoreTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using FluentAssertions;
using Xunit;

namespace StellaOps.HybridLogicalClock.Tests;

/// <summary>
/// Unit tests for <see cref="InMemoryHlcStateStore"/>.
/// </summary>
[Trait("Category", "Unit")]
public sealed class InMemoryHlcStateStoreTests
{
    [Fact]
    public async Task LoadAsync_NoState_ReturnsNull()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;

        // Act
        var result = await store.LoadAsync("node1", ct);

        // Assert
        result.Should().BeNull();
    }

    [Fact]
    public async Task SaveAsync_ThenLoadAsync_ReturnsState()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var timestamp = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };

        // Act
        await store.SaveAsync(timestamp, ct);
        var result = await store.LoadAsync("node1", ct);

        // Assert
        result.Should().Be(timestamp);
    }

    [Fact]
    public async Task SaveAsync_GreaterTimestamp_Updates()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var first = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };
        var second = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 10
        };

        // Act
        await store.SaveAsync(first, ct);
        await store.SaveAsync(second, ct);
        var result = await store.LoadAsync("node1", ct);

        // Assert
        result.Should().Be(second);
    }

    [Fact]
    public async Task SaveAsync_SmallerTimestamp_DoesNotUpdate()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var first = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 10
        };
        var second = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 5
        };

        // Act
        await store.SaveAsync(first, ct);
        await store.SaveAsync(second, ct);
        var result = await store.LoadAsync("node1", ct);

        // Assert
        result.Should().Be(first);
    }

    [Fact]
    public async Task SaveAsync_MultipleNodes_Isolated()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        var node1State = new HlcTimestamp
        {
            PhysicalTime = 1000,
            NodeId = "node1",
            LogicalCounter = 1
        };
        var node2State = new HlcTimestamp
        {
            PhysicalTime = 2000,
            NodeId = "node2",
            LogicalCounter = 2
        };

        // Act
        await store.SaveAsync(node1State, ct);
        await store.SaveAsync(node2State, ct);

        // Assert
        var loaded1 = await store.LoadAsync("node1", ct);
        var loaded2 = await store.LoadAsync("node2", ct);

        loaded1.Should().Be(node1State);
        loaded2.Should().Be(node2State);
        store.GetAllStates().Count.Should().Be(2);
    }

    [Fact]
    public async Task Clear_RemovesAllState()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;
        await store.SaveAsync(new HlcTimestamp { PhysicalTime = 1, NodeId = "n1", LogicalCounter = 0 }, ct);
        await store.SaveAsync(new HlcTimestamp { PhysicalTime = 2, NodeId = "n2", LogicalCounter = 0 }, ct);

        // Act
        store.Clear();

        // Assert
        store.GetAllStates().Count.Should().Be(0);
    }

    [Fact]
    public async Task LoadAsync_NullNodeId_ThrowsArgumentNullException()
    {
        // Arrange
        var store = new InMemoryHlcStateStore();
        var ct = TestContext.Current.CancellationToken;

        // Act
        var act = () => store.LoadAsync(null!, ct);

        // Assert
        await act.Should().ThrowAsync<ArgumentNullException>();
    }
}
@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="Moq" />
    <PackageReference Include="xunit.v3" />
    <PackageReference Include="xunit.runner.visualstudio">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
  </ItemGroup>

</Project>
77
src/__Libraries/StellaOps.HybridLogicalClock/HlcOptions.cs
Normal file
@@ -0,0 +1,77 @@
// <copyright file="HlcOptions.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.ComponentModel.DataAnnotations;

namespace StellaOps.HybridLogicalClock;

/// <summary>
/// Configuration options for the Hybrid Logical Clock.
/// </summary>
public sealed class HlcOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "HybridLogicalClock";

    /// <summary>
    /// Gets or sets the unique node identifier.
    /// </summary>
    /// <remarks>
    /// Should be stable across restarts (e.g., "scheduler-east-1").
    /// If not set, it will be auto-generated from the machine name and process ID.
    /// </remarks>
    public string? NodeId { get; set; }

    /// <summary>
    /// Gets or sets the maximum allowed clock skew.
    /// </summary>
    /// <remarks>
    /// Remote timestamps differing from the local physical clock by more than this
    /// will be rejected with <see cref="HlcClockSkewException"/>.
    /// Default: 1 minute.
    /// </remarks>
    [Range(typeof(TimeSpan), "00:00:01", "01:00:00")]
    public TimeSpan MaxClockSkew { get; set; } = TimeSpan.FromMinutes(1);

    /// <summary>
    /// Gets or sets the PostgreSQL connection string for state persistence.
    /// </summary>
    /// <remarks>
    /// If null, the in-memory state store is used (state is lost on restart).
    /// </remarks>
    public string? PostgresConnectionString { get; set; }

    /// <summary>
    /// Gets or sets the PostgreSQL schema for HLC tables.
    /// </summary>
    public string PostgresSchema { get; set; } = "scheduler";

    /// <summary>
    /// Gets or sets a value indicating whether to use the in-memory state store.
    /// </summary>
    /// <remarks>
    /// If true, state is not persisted. Useful for testing.
    /// If false and PostgresConnectionString is set, PostgreSQL is used.
    /// </remarks>
    public bool UseInMemoryStore { get; set; }

    /// <summary>
    /// Gets the effective node ID, generating one if not configured.
    /// </summary>
    /// <returns>The node ID to use.</returns>
    public string GetEffectiveNodeId()
    {
        if (!string.IsNullOrWhiteSpace(NodeId))
        {
            return NodeId;
        }

        // Generate a node ID from the machine name and process ID.
        // Note: the process ID changes across restarts, so a configured NodeId is preferred.
        var machineName = Environment.MachineName.ToLowerInvariant();
        var processId = Environment.ProcessId;
        return $"{machineName}-{processId}";
    }
}
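For orientation, options of this shape would typically be bound with the standard Microsoft.Extensions.Options pattern; the exact registration hook in this repo may differ, so treat the wiring below (and the services/configuration variables) as an assumption:

// appsettings.json, section name taken from HlcOptions.SectionName:
//   "HybridLogicalClock": { "NodeId": "scheduler-east-1", "MaxClockSkew": "00:01:00" }
services.AddOptions<HlcOptions>()
    .Bind(configuration.GetSection(HlcOptions.SectionName))
    .ValidateDataAnnotations(); // enforces the [Range] on MaxClockSkew at startup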
@@ -0,0 +1,44 @@
// <copyright file="IHlcStateStore.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

namespace StellaOps.HybridLogicalClock;

/// <summary>
/// Persistent storage for HLC state (survives restarts).
/// </summary>
/// <remarks>
/// <para>
/// Implementations should provide atomic update semantics to prevent
/// state corruption during concurrent operations. The store is used to:
/// </para>
/// <list type="bullet">
/// <item><description>Persist HLC state after each tick (fire-and-forget)</description></item>
/// <item><description>Recover state on node restart</description></item>
/// <item><description>Ensure clock monotonicity across restarts</description></item>
/// </list>
/// </remarks>
public interface IHlcStateStore
{
    /// <summary>
    /// Load the last persisted HLC state for a node.
    /// </summary>
    /// <param name="nodeId">The node identifier to load state for.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The last persisted timestamp, or null if no state exists.</returns>
    Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default);

    /// <summary>
    /// Persist HLC state (called after each tick).
    /// </summary>
    /// <remarks>
    /// <para>
    /// This operation should be atomic and idempotent. Implementations may use
    /// fire-and-forget semantics with error logging for performance.
    /// </para>
    /// </remarks>
    /// <param name="timestamp">The timestamp state to persist.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>A task representing the async operation.</returns>
    Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default);
}
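The recover-on-restart bullet above implies an initialization step of roughly this shape; a hedged sketch under the assumption that the clock seeds itself from persisted state before its first tick:

// Never hand out a timestamp at or below the last persisted one, even if the
// wall clock moved backwards while the node was down.
var persisted = await stateStore.LoadAsync(nodeId, ct);
var nowMs = timeProvider.GetUtcNow().ToUnixTimeMilliseconds();
var seed = persisted is { } last && last.PhysicalTime >= nowMs
    ? new HlcTimestamp { PhysicalTime = last.PhysicalTime, NodeId = nodeId, LogicalCounter = last.LogicalCounter + 1 }
    : new HlcTimestamp { PhysicalTime = nowMs, NodeId = nodeId, LogicalCounter = 0 };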
@@ -52,31 +52,3 @@ public interface IHybridLogicalClock
     string NodeId { get; }
 }
 
-/// <summary>
-/// Persistent storage for HLC state (survives restarts).
-/// </summary>
-/// <remarks>
-/// Implementations should ensure atomic updates to prevent state loss
-/// during concurrent access or node failures.
-/// </remarks>
-public interface IHlcStateStore
-{
-    /// <summary>
-    /// Load last persisted HLC state for node.
-    /// </summary>
-    /// <param name="nodeId">Node identifier to load state for</param>
-    /// <param name="ct">Cancellation token</param>
-    /// <returns>Last persisted timestamp, or null if no state exists</returns>
-    Task<HlcTimestamp?> LoadAsync(string nodeId, CancellationToken ct = default);
-
-    /// <summary>
-    /// Persist HLC state.
-    /// </summary>
-    /// <remarks>
-    /// Called after each tick to ensure state survives restarts.
-    /// Implementations may batch or debounce writes for performance.
-    /// </remarks>
-    /// <param name="timestamp">Current timestamp to persist</param>
-    /// <param name="ct">Cancellation token</param>
-    Task SaveAsync(HlcTimestamp timestamp, CancellationToken ct = default);
-}
src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs
Normal file
323
src/__Libraries/StellaOps.Replay.Core.Tests/ReplayProofTests.cs
Normal file
@@ -0,0 +1,323 @@
|
||||
// <copyright file="ReplayProofTests.cs" company="Stella Operations">
|
||||
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
|
||||
// </copyright>
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Replay.Core.Models;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Replay.Core.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for ReplayProof model and compact string generation.
|
||||
/// Sprint: SPRINT_20260105_002_001_REPLAY, Tasks RPL-011 through RPL-014.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
public class ReplayProofTests
|
||||
{
|
||||
private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 5, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void FromExecutionResult_CreatesValidProof()
|
||||
{
|
||||
// Arrange & Act
|
||||
var proof = ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0",
|
||||
artifactDigest: "sha256:image123",
|
||||
signatureVerified: true,
|
||||
signatureKeyId: "key-001");
|
||||
|
||||
// Assert
|
||||
proof.BundleHash.Should().Be("sha256:abc123");
|
||||
proof.PolicyVersion.Should().Be("1.0.0");
|
||||
proof.VerdictRoot.Should().Be("sha256:def456");
|
||||
proof.VerdictMatches.Should().BeTrue();
|
||||
proof.DurationMs.Should().Be(150);
|
||||
proof.ReplayedAt.Should().Be(FixedTimestamp);
|
||||
proof.EngineVersion.Should().Be("1.0.0");
|
||||
proof.ArtifactDigest.Should().Be("sha256:image123");
|
||||
proof.SignatureVerified.Should().BeTrue();
|
||||
proof.SignatureKeyId.Should().Be("key-001");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCompactString_GeneratesCorrectFormat()
|
||||
{
|
||||
// Arrange
|
||||
var proof = CreateTestProof();
|
||||
|
||||
// Act
|
||||
var compact = proof.ToCompactString();
|
||||
|
||||
// Assert
|
||||
compact.Should().StartWith("replay-proof:");
|
||||
compact.Should().HaveLength("replay-proof:".Length + 64); // SHA-256 hex = 64 chars
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCompactString_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var proof1 = CreateTestProof();
|
||||
var proof2 = CreateTestProof();
|
||||
|
||||
// Act
|
||||
var compact1 = proof1.ToCompactString();
|
||||
var compact2 = proof2.ToCompactString();
|
||||
|
||||
// Assert
|
||||
compact1.Should().Be(compact2, "same inputs should produce same compact proof");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCanonicalJson_SortsKeysDeterministically()
|
||||
{
|
||||
// Arrange
|
||||
var proof = CreateTestProof();
|
||||
|
||||
// Act
|
||||
var json = proof.ToCanonicalJson();
|
||||
|
||||
// Assert - Keys should appear in alphabetical order
|
||||
var keys = ExtractJsonKeys(json);
|
||||
keys.Should().BeInAscendingOrder(StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCanonicalJson_ExcludesNullValues()
|
||||
{
|
||||
// Arrange
|
||||
var proof = ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0");
|
||||
|
||||
// Act
|
||||
var json = proof.ToCanonicalJson();
|
||||
|
||||
// Assert - Should not contain null values
|
||||
json.Should().NotContain("null");
|
||||
json.Should().NotContain("artifactDigest"); // Not set, so excluded
|
||||
json.Should().NotContain("signatureVerified"); // Not set, so excluded
|
||||
json.Should().NotContain("signatureKeyId"); // Not set, so excluded
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCanonicalJson_FormatsTimestampCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var proof = CreateTestProof();
|
||||
|
||||
// Act
|
||||
var json = proof.ToCanonicalJson();
|
||||
|
||||
// Assert - ISO 8601 UTC format
|
||||
json.Should().Contain("2026-01-05T12:00:00.000Z");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateCompactString_ReturnsTrueForValidProof()
|
||||
{
|
||||
// Arrange
|
||||
var proof = CreateTestProof();
|
||||
var compact = proof.ToCompactString();
|
||||
var canonicalJson = proof.ToCanonicalJson();
|
||||
|
||||
// Act
|
||||
var isValid = ReplayProof.ValidateCompactString(compact, canonicalJson);
|
||||
|
||||
// Assert
|
||||
isValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateCompactString_ReturnsFalseForTamperedJson()
|
||||
{
|
||||
// Arrange
|
||||
var proof = CreateTestProof();
|
||||
var compact = proof.ToCompactString();
|
||||
var tamperedJson = proof.ToCanonicalJson().Replace("1.0.0", "2.0.0");
|
||||
|
||||
// Act
|
||||
var isValid = ReplayProof.ValidateCompactString(compact, tamperedJson);
|
||||
|
||||
// Assert
|
||||
isValid.Should().BeFalse("tampered JSON should not validate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateCompactString_ReturnsFalseForInvalidPrefix()
|
||||
{
|
||||
// Arrange
|
||||
var canonicalJson = CreateTestProof().ToCanonicalJson();
|
||||
|
||||
// Act
|
||||
var isValid = ReplayProof.ValidateCompactString("invalid-proof:abc123", canonicalJson);
|
||||
|
||||
// Assert
|
||||
isValid.Should().BeFalse("invalid prefix should not validate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ValidateCompactString_ReturnsFalseForEmptyInputs()
|
||||
{
|
||||
// Act & Assert
|
||||
ReplayProof.ValidateCompactString("", "{}").Should().BeFalse();
|
||||
ReplayProof.ValidateCompactString("replay-proof:abc", "").Should().BeFalse();
|
||||
ReplayProof.ValidateCompactString(null!, "{}").Should().BeFalse();
|
||||
ReplayProof.ValidateCompactString("replay-proof:abc", null!).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCanonicalJson_IncludesMetadataWhenPresent()
|
||||
{
|
||||
// Arrange
|
||||
var proof = ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0",
|
||||
metadata: ImmutableDictionary<string, string>.Empty
|
||||
.Add("tenant", "acme-corp")
|
||||
.Add("project", "web-app"));
|
||||
|
||||
// Act
|
||||
var json = proof.ToCanonicalJson();
|
||||
|
||||
// Assert
|
||||
json.Should().Contain("metadata");
|
||||
json.Should().Contain("tenant");
|
||||
json.Should().Contain("acme-corp");
|
||||
json.Should().Contain("project");
|
||||
json.Should().Contain("web-app");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToCanonicalJson_SortsMetadataKeys()
|
||||
{
|
||||
// Arrange
|
||||
var proof = ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0",
|
||||
metadata: ImmutableDictionary<string, string>.Empty
|
||||
.Add("zebra", "z-value")
|
||||
.Add("alpha", "a-value")
|
||||
.Add("mike", "m-value"));
|
||||
|
||||
// Act
|
||||
var json = proof.ToCanonicalJson();
|
||||
|
||||
// Assert - Metadata keys should be in alphabetical order
|
||||
var alphaPos = json.IndexOf("alpha", StringComparison.Ordinal);
|
||||
var mikePos = json.IndexOf("mike", StringComparison.Ordinal);
|
||||
var zebraPos = json.IndexOf("zebra", StringComparison.Ordinal);
|
||||
|
||||
alphaPos.Should().BeLessThan(mikePos);
|
||||
mikePos.Should().BeLessThan(zebraPos);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FromExecutionResult_ThrowsOnNullRequiredParams()
|
||||
{
|
||||
// Act & Assert
|
||||
var act1 = () => ReplayProof.FromExecutionResult(
|
||||
bundleHash: null!,
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0");
|
||||
act1.Should().Throw<ArgumentNullException>().WithParameterName("bundleHash");
|
||||
|
||||
var act2 = () => ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: null!,
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0");
|
||||
act2.Should().Throw<ArgumentNullException>().WithParameterName("policyVersion");
|
||||
|
||||
var act3 = () => ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: null!,
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0");
|
||||
act3.Should().Throw<ArgumentNullException>().WithParameterName("verdictRoot");
|
||||
|
||||
var act4 = () => ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:def456",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: null!);
|
||||
act4.Should().Throw<ArgumentNullException>().WithParameterName("engineVersion");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SchemaVersion_DefaultsTo1_0_0()
|
||||
{
|
||||
// Arrange & Act
|
||||
var proof = CreateTestProof();
|
||||
|
||||
// Assert
|
||||
proof.SchemaVersion.Should().Be("1.0.0");
|
||||
}
|
||||
|
||||
private static ReplayProof CreateTestProof()
|
||||
{
|
||||
return ReplayProof.FromExecutionResult(
|
||||
bundleHash: "sha256:abc123def456",
|
||||
policyVersion: "1.0.0",
|
||||
verdictRoot: "sha256:verdict789",
|
||||
verdictMatches: true,
|
||||
durationMs: 150,
|
||||
replayedAt: FixedTimestamp,
|
||||
engineVersion: "1.0.0",
|
||||
artifactDigest: "sha256:image123",
|
||||
signatureVerified: true,
|
||||
signatureKeyId: "key-001");
|
||||
}
|
||||
|
||||
private static List<string> ExtractJsonKeys(string json)
|
||||
{
|
||||
var keys = new List<string>();
|
||||
using var doc = JsonDocument.Parse(json);
|
||||
|
||||
foreach (var prop in doc.RootElement.EnumerateObject())
|
||||
{
|
||||
keys.Add(prop.Name);
|
||||
}
|
||||
|
||||
return keys;
|
||||
}
|
||||
}
|
||||
204
src/__Libraries/StellaOps.Replay.Core/Models/ReplayProof.cs
Normal file
@@ -0,0 +1,204 @@
// <copyright file="ReplayProof.cs" company="Stella Operations">
// Copyright (c) Stella Operations. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Replay.Core.Models;

/// <summary>
/// Compact proof artifact for audit trails and ticket attachments.
/// Captures the essential evidence that a replay was performed and matched expectations.
/// </summary>
public sealed record ReplayProof
{
    /// <summary>
    /// Schema version for forward compatibility.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>
    /// SHA-256 of the replay bundle used.
    /// </summary>
    [JsonPropertyName("bundleHash")]
    public required string BundleHash { get; init; }

    /// <summary>
    /// Policy version used in the replay.
    /// </summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>
    /// Merkle root of all verdict outputs.
    /// </summary>
    [JsonPropertyName("verdictRoot")]
    public required string VerdictRoot { get; init; }

    /// <summary>
    /// Whether the replayed verdict matches the expected verdict.
    /// </summary>
    [JsonPropertyName("verdictMatches")]
    public required bool VerdictMatches { get; init; }

    /// <summary>
    /// Replay execution duration in milliseconds.
    /// </summary>
    [JsonPropertyName("durationMs")]
    public required long DurationMs { get; init; }

    /// <summary>
    /// UTC timestamp when the replay was performed.
    /// </summary>
    [JsonPropertyName("replayedAt")]
    public required DateTimeOffset ReplayedAt { get; init; }

    /// <summary>
    /// Version of the replay engine used.
    /// </summary>
    [JsonPropertyName("engineVersion")]
    public required string EngineVersion { get; init; }

    /// <summary>
    /// Original artifact digest (image or SBOM) that was evaluated.
    /// </summary>
    [JsonPropertyName("artifactDigest")]
    public string? ArtifactDigest { get; init; }

    /// <summary>
    /// DSSE signature verification status (true/false, or null if not present).
    /// </summary>
    [JsonPropertyName("signatureVerified")]
    public bool? SignatureVerified { get; init; }

    /// <summary>
    /// Key ID used for signature verification.
    /// </summary>
    [JsonPropertyName("signatureKeyId")]
    public string? SignatureKeyId { get; init; }

    /// <summary>
    /// Additional metadata (e.g., organization, project, tenant).
    /// </summary>
    [JsonPropertyName("metadata")]
    public ImmutableDictionary<string, string>? Metadata { get; init; }

    /// <summary>
    /// JSON serializer options for canonical serialization (sorted keys, no indentation).
    /// </summary>
    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        // Note: sorted keys are ensured manually in ToCanonicalJson()
    };

    /// <summary>
    /// Converts the proof to a compact string of the form "replay-proof:&lt;sha256&gt;".
    /// The hash is computed over the canonical JSON representation.
    /// </summary>
    /// <returns>Compact proof string suitable for ticket attachments.</returns>
    public string ToCompactString()
    {
        var canonicalJson = ToCanonicalJson();
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
        var hashHex = Convert.ToHexString(hashBytes).ToLowerInvariant();
        return $"replay-proof:{hashHex}";
    }

    /// <summary>
    /// Converts the proof to canonical JSON (RFC 8785 style: sorted keys, minimal whitespace).
    /// </summary>
    /// <returns>Canonical JSON string.</returns>
    public string ToCanonicalJson()
    {
        // Build an ordered dictionary for canonical serialization
        var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["artifactDigest"] = ArtifactDigest,
            ["bundleHash"] = BundleHash,
            ["durationMs"] = DurationMs,
            ["engineVersion"] = EngineVersion,
            ["metadata"] = Metadata is not null && Metadata.Count > 0
                ? new SortedDictionary<string, string>(Metadata, StringComparer.Ordinal)
                : null,
            ["policyVersion"] = PolicyVersion,
            ["replayedAt"] = ReplayedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.fffZ", System.Globalization.CultureInfo.InvariantCulture),
            ["schemaVersion"] = SchemaVersion,
            ["signatureKeyId"] = SignatureKeyId,
            ["signatureVerified"] = SignatureVerified,
            ["verdictMatches"] = VerdictMatches,
            ["verdictRoot"] = VerdictRoot,
        };

        // Remove null values for the canonical form; keep the result sorted so
        // key order does not depend on Dictionary enumeration behavior.
        var filtered = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var kvp in ordered.Where(kvp => kvp.Value is not null))
        {
            filtered[kvp.Key] = kvp.Value;
        }

        return JsonSerializer.Serialize(filtered, CanonicalOptions);
    }

    /// <summary>
    /// Parses a compact proof string and validates its hash.
    /// </summary>
    /// <param name="compactString">The compact proof string ("replay-proof:&lt;hash&gt;").</param>
    /// <param name="originalJson">The original canonical JSON to verify against.</param>
    /// <returns>True if the hash matches, false otherwise.</returns>
    public static bool ValidateCompactString(string compactString, string originalJson)
    {
        if (string.IsNullOrWhiteSpace(compactString) || string.IsNullOrWhiteSpace(originalJson))
        {
            return false;
        }

        const string prefix = "replay-proof:";
        if (!compactString.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        var expectedHash = compactString[prefix.Length..];
        var actualHashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(originalJson));
        var actualHash = Convert.ToHexString(actualHashBytes).ToLowerInvariant();

        return string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Creates a ReplayProof from execution results.
    /// </summary>
    public static ReplayProof FromExecutionResult(
        string bundleHash,
        string policyVersion,
        string verdictRoot,
        bool verdictMatches,
        long durationMs,
        DateTimeOffset replayedAt,
        string engineVersion,
        string? artifactDigest = null,
        bool? signatureVerified = null,
        string? signatureKeyId = null,
        ImmutableDictionary<string, string>? metadata = null)
    {
        return new ReplayProof
        {
            BundleHash = bundleHash ?? throw new ArgumentNullException(nameof(bundleHash)),
            PolicyVersion = policyVersion ?? throw new ArgumentNullException(nameof(policyVersion)),
            VerdictRoot = verdictRoot ?? throw new ArgumentNullException(nameof(verdictRoot)),
            VerdictMatches = verdictMatches,
            DurationMs = durationMs,
            ReplayedAt = replayedAt,
            EngineVersion = engineVersion ?? throw new ArgumentNullException(nameof(engineVersion)),
            ArtifactDigest = artifactDigest,
            SignatureVerified = signatureVerified,
            SignatureKeyId = signatureKeyId,
            Metadata = metadata,
        };
    }
}
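End to end, the model composes as follows; every call below is defined in this file, and the hash values are placeholders:

var proof = ReplayProof.FromExecutionResult(
    bundleHash: "sha256:...",
    policyVersion: "1.2.3",
    verdictRoot: "sha256:...",
    verdictMatches: true,
    durationMs: 87,
    replayedAt: DateTimeOffset.UtcNow,
    engineVersion: "1.0.0");

var compact = proof.ToCompactString(); // "replay-proof:" + 64 hex chars
var json = proof.ToCanonicalJson();    // the bytes the hash commits to

// A verifier holding both artifacts re-checks the binding:
var ok = ReplayProof.ValidateCompactString(compact, json); // true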
@@ -0,0 +1,278 @@
// <copyright file="BlastRadiusTestRunner.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-002

using System.Collections.Immutable;
using System.Diagnostics;

namespace StellaOps.TestKit.BlastRadius;

/// <summary>
/// Runs tests filtered by blast radius for incident response.
/// </summary>
public static class BlastRadiusTestRunner
{
    /// <summary>
    /// Get the xUnit filter for specific blast radii.
    /// </summary>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <returns>xUnit filter string.</returns>
    /// <exception cref="ArgumentException">Thrown when no blast radii are provided.</exception>
    public static string GetFilter(params string[] blastRadii)
    {
        if (blastRadii.Length == 0)
        {
            throw new ArgumentException("At least one blast radius required", nameof(blastRadii));
        }

        var filters = blastRadii.Select(br => $"BlastRadius={br}");
        return string.Join("|", filters);
    }

    /// <summary>
    /// Get the xUnit filter for specific blast radii (IEnumerable overload).
    /// </summary>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <returns>xUnit filter string.</returns>
    public static string GetFilter(IEnumerable<string> blastRadii)
    {
        return GetFilter(blastRadii.ToArray());
    }

    /// <summary>
    /// Get the dotnet test command for specific blast radii.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadii">Blast radii to filter by.</param>
    /// <param name="additionalArgs">Additional dotnet test arguments.</param>
    /// <returns>Complete dotnet test command.</returns>
    public static string GetCommand(
        string testProject,
        IEnumerable<string> blastRadii,
        string? additionalArgs = null)
    {
        var filter = GetFilter(blastRadii);
        var args = $"test {testProject} --filter \"{filter}\"";

        if (!string.IsNullOrWhiteSpace(additionalArgs))
        {
            args += $" {additionalArgs}";
        }

        return $"dotnet {args}";
    }

    /// <summary>
    /// Run tests for specific operational surfaces.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadii">Blast radii to run tests for.</param>
    /// <param name="workingDirectory">Working directory for test execution.</param>
    /// <param name="timeoutMs">Timeout in milliseconds.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Test run result.</returns>
    public static async Task<TestRunResult> RunForBlastRadiiAsync(
        string testProject,
        string[] blastRadii,
        string? workingDirectory = null,
        int timeoutMs = 600000,
        CancellationToken ct = default)
    {
        var filter = GetFilter(blastRadii);

        var startInfo = new ProcessStartInfo
        {
            FileName = "dotnet",
            Arguments = $"test {testProject} --filter \"{filter}\" --logger trx --verbosity normal",
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };

        if (!string.IsNullOrWhiteSpace(workingDirectory))
        {
            startInfo.WorkingDirectory = workingDirectory;
        }

        var stdout = new List<string>();
        var stderr = new List<string>();
        var sw = Stopwatch.StartNew();

        using var process = new Process { StartInfo = startInfo };

        process.OutputDataReceived += (_, e) =>
        {
            if (e.Data != null)
            {
                stdout.Add(e.Data);
            }
        };

        process.ErrorDataReceived += (_, e) =>
        {
            if (e.Data != null)
            {
                stderr.Add(e.Data);
            }
        };

        process.Start();
        process.BeginOutputReadLine();
        process.BeginErrorReadLine();

        using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
        cts.CancelAfter(timeoutMs);

        try
        {
            await process.WaitForExitAsync(cts.Token);
        }
        catch (OperationCanceledException)
        {
            try
            {
                process.Kill(entireProcessTree: true);
            }
            catch
            {
                // Ignore kill errors
            }

            return new TestRunResult(
                ExitCode: -1,
                BlastRadii: [.. blastRadii],
                Filter: filter,
                DurationMs: sw.ElapsedMilliseconds,
                Output: [.. stdout],
                Errors: [.. stderr],
                TimedOut: true);
        }

        sw.Stop();

        return new TestRunResult(
            ExitCode: process.ExitCode,
            BlastRadii: [.. blastRadii],
            Filter: filter,
            DurationMs: sw.ElapsedMilliseconds,
            Output: [.. stdout],
            Errors: [.. stderr],
            TimedOut: false);
    }

    /// <summary>
    /// Run tests for a single blast radius.
    /// </summary>
    /// <param name="testProject">Test project path or solution.</param>
    /// <param name="blastRadius">Blast radius to run tests for.</param>
    /// <param name="workingDirectory">Working directory for test execution.</param>
    /// <param name="timeoutMs">Timeout in milliseconds.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Test run result.</returns>
    public static Task<TestRunResult> RunForBlastRadiusAsync(
        string testProject,
        string blastRadius,
        string? workingDirectory = null,
        int timeoutMs = 600000,
        CancellationToken ct = default)
    {
        return RunForBlastRadiiAsync(testProject, [blastRadius], workingDirectory, timeoutMs, ct);
    }

    /// <summary>
    /// Parse test results from TRX output.
    /// </summary>
    /// <param name="result">Test run result.</param>
    /// <returns>Summary of test results.</returns>
    public static TestRunSummary ParseSummary(TestRunResult result)
    {
        var summary = new TestRunSummary(
            Passed: 0,
            Failed: 0,
            Skipped: 0,
            Total: 0);

        foreach (var line in result.Output)
        {
            // Parse the dotnet test output format: "Passed: X" etc.
            if (line.Contains("Passed:", StringComparison.OrdinalIgnoreCase))
            {
                var match = System.Text.RegularExpressions.Regex.Match(line, @"Passed:\s*(\d+)");
                if (match.Success && int.TryParse(match.Groups[1].Value, out var passed))
                {
                    summary = summary with { Passed = passed };
                }
            }

            if (line.Contains("Failed:", StringComparison.OrdinalIgnoreCase))
            {
                var match = System.Text.RegularExpressions.Regex.Match(line, @"Failed:\s*(\d+)");
                if (match.Success && int.TryParse(match.Groups[1].Value, out var failed))
                {
                    summary = summary with { Failed = failed };
                }
            }

            if (line.Contains("Skipped:", StringComparison.OrdinalIgnoreCase))
            {
                var match = System.Text.RegularExpressions.Regex.Match(line, @"Skipped:\s*(\d+)");
                if (match.Success && int.TryParse(match.Groups[1].Value, out var skipped))
                {
                    summary = summary with { Skipped = skipped };
                }
            }

            if (line.Contains("Total:", StringComparison.OrdinalIgnoreCase))
            {
                var match = System.Text.RegularExpressions.Regex.Match(line, @"Total:\s*(\d+)");
                if (match.Success && int.TryParse(match.Groups[1].Value, out var total))
                {
                    summary = summary with { Total = total };
                }
            }
        }

        return summary;
    }
}

/// <summary>
/// Result of running tests for blast radii.
/// </summary>
/// <param name="ExitCode">Process exit code (0 = success).</param>
/// <param name="BlastRadii">Blast radii that were tested.</param>
/// <param name="Filter">xUnit filter that was used.</param>
/// <param name="DurationMs">Duration of the test run in milliseconds.</param>
/// <param name="Output">Standard output lines.</param>
/// <param name="Errors">Standard error lines.</param>
/// <param name="TimedOut">Whether the test run timed out.</param>
public sealed record TestRunResult(
    int ExitCode,
    ImmutableArray<string> BlastRadii,
    string Filter,
    long DurationMs,
    ImmutableArray<string> Output,
    ImmutableArray<string> Errors,
    bool TimedOut)
{
    /// <summary>
    /// Gets a value indicating whether the test run was successful.
    /// </summary>
    public bool IsSuccess => ExitCode == 0 && !TimedOut;
}

/// <summary>
/// Summary of test run results.
/// </summary>
/// <param name="Passed">Number of passed tests.</param>
/// <param name="Failed">Number of failed tests.</param>
/// <param name="Skipped">Number of skipped tests.</param>
/// <param name="Total">Total number of tests.</param>
public sealed record TestRunSummary(
    int Passed,
    int Failed,
    int Skipped,
    int Total);
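A quick usage illustration; the project name and radius values are placeholders, and the resulting strings follow directly from GetFilter and GetCommand above:

var filter = BlastRadiusTestRunner.GetFilter("Api", "Persistence");
// "BlastRadius=Api|BlastRadius=Persistence"

var command = BlastRadiusTestRunner.GetCommand("MyService.Tests.csproj", ["Api"], "--no-build");
// "dotnet test MyService.Tests.csproj --filter \"BlastRadius=Api\" --no-build"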
@@ -0,0 +1,241 @@
// <copyright file="BlastRadiusValidator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>
// Sprint: SPRINT_20260105_002_005_TEST_cross_cutting
// Task: CCUT-003

using System.Collections.Immutable;
using System.Reflection;

namespace StellaOps.TestKit.BlastRadius;

/// <summary>
/// Validates that tests have appropriate blast-radius annotations.
/// </summary>
public sealed class BlastRadiusValidator
{
    private readonly IReadOnlyList<Type> _testClasses;
    private readonly BlastRadiusValidationConfig _config;

    /// <summary>
    /// Initializes a new instance of the <see cref="BlastRadiusValidator"/> class.
    /// </summary>
    /// <param name="testClasses">Test classes to validate.</param>
    /// <param name="config">Validation configuration.</param>
    public BlastRadiusValidator(
        IEnumerable<Type> testClasses,
        BlastRadiusValidationConfig? config = null)
    {
        _testClasses = testClasses.ToList();
        _config = config ?? new BlastRadiusValidationConfig();
    }

    /// <summary>
    /// Create a validator from assemblies.
    /// </summary>
    /// <param name="assemblies">Assemblies to scan for test classes.</param>
    /// <param name="config">Validation configuration.</param>
    /// <returns>BlastRadiusValidator instance.</returns>
    public static BlastRadiusValidator FromAssemblies(
        IEnumerable<Assembly> assemblies,
        BlastRadiusValidationConfig? config = null)
    {
        var testClasses = assemblies
            .SelectMany(a => a.GetTypes())
            .Where(IsTestClass)
            .ToList();

        return new BlastRadiusValidator(testClasses, config);
    }

    /// <summary>
    /// Validate all tests that require blast-radius annotations.
    /// </summary>
    /// <returns>Validation result.</returns>
    public BlastRadiusValidationResult Validate()
    {
        var violations = new List<BlastRadiusViolation>();

        foreach (var testClass in _testClasses)
        {
            var classTraits = GetTraits(testClass);

            // Check if class has a category that requires blast radius
            var categories = classTraits
                .Where(t => t.Name == "Category")
                .Select(t => t.Value)
                .ToList();

            var requiresBlastRadius = categories
                .Any(c => _config.CategoriesRequiringBlastRadius.Contains(c));

            if (!requiresBlastRadius)
            {
                continue;
            }

            // Check if class has blast radius annotation
            var hasBlastRadius = classTraits.Any(t => t.Name == "BlastRadius");

            if (!hasBlastRadius)
            {
                violations.Add(new BlastRadiusViolation(
                    TestClass: testClass.FullName ?? testClass.Name,
                    Category: string.Join(", ", categories.Where(c => _config.CategoriesRequiringBlastRadius.Contains(c))),
                    Message: $"Test class requires BlastRadius annotation because it has category: {string.Join(", ", categories.Where(c => _config.CategoriesRequiringBlastRadius.Contains(c)))}"));
            }
        }

        return new BlastRadiusValidationResult(
            IsValid: violations.Count == 0,
            Violations: [.. violations],
            TotalTestClasses: _testClasses.Count,
            TestClassesRequiringBlastRadius: _testClasses.Count(c =>
                GetTraits(c).Any(t =>
                    t.Name == "Category" &&
                    _config.CategoriesRequiringBlastRadius.Contains(t.Value))));
    }

    /// <summary>
    /// Get coverage report by blast radius.
    /// </summary>
    /// <returns>Coverage report.</returns>
    public BlastRadiusCoverageReport GetCoverageReport()
    {
        var byBlastRadius = new Dictionary<string, List<string>>();
        var uncategorized = new List<string>();

        foreach (var testClass in _testClasses)
        {
            var traits = GetTraits(testClass);
            var blastRadii = traits
                .Where(t => t.Name == "BlastRadius")
                .Select(t => t.Value)
                .ToList();

            if (blastRadii.Count == 0)
            {
                uncategorized.Add(testClass.FullName ?? testClass.Name);
            }
            else
            {
                foreach (var br in blastRadii)
                {
                    if (!byBlastRadius.TryGetValue(br, out var list))
                    {
                        list = [];
                        byBlastRadius[br] = list;
                    }

                    list.Add(testClass.FullName ?? testClass.Name);
                }
            }
        }

        return new BlastRadiusCoverageReport(
            ByBlastRadius: byBlastRadius.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray()),
            UncategorizedTestClasses: [.. uncategorized],
            TotalTestClasses: _testClasses.Count);
    }

    /// <summary>
    /// Get all blast radius values found in test classes.
    /// </summary>
    /// <returns>Distinct blast radius values.</returns>
    public IReadOnlyList<string> GetBlastRadiusValues()
    {
        return _testClasses
            .SelectMany(c => GetTraits(c))
            .Where(t => t.Name == "BlastRadius")
            .Select(t => t.Value)
            .Distinct()
            .OrderBy(v => v)
            .ToList();
    }

    private static bool IsTestClass(Type type)
    {
        if (!type.IsClass || type.IsAbstract)
        {
            return false;
        }

        // Check for xUnit test methods
        return type.GetMethods()
            .Any(m => m.GetCustomAttributes()
                .Any(a => a.GetType().Name is "FactAttribute" or "TheoryAttribute"));
    }

    private static IEnumerable<(string Name, string Value)> GetTraits(Type type)
    {
        var traitAttributes = type.GetCustomAttributes()
            .Where(a => a.GetType().Name == "TraitAttribute")
            .ToList();

        foreach (var attr in traitAttributes)
        {
            var nameProperty = attr.GetType().GetProperty("Name");
            var valueProperty = attr.GetType().GetProperty("Value");

            if (nameProperty != null && valueProperty != null)
            {
                var name = nameProperty.GetValue(attr)?.ToString() ?? string.Empty;
                var value = valueProperty.GetValue(attr)?.ToString() ?? string.Empty;
                yield return (name, value);
            }
        }
    }
}

/// <summary>
/// Configuration for blast-radius validation.
/// </summary>
/// <param name="CategoriesRequiringBlastRadius">Categories that require blast-radius annotations.</param>
public sealed record BlastRadiusValidationConfig(
    ImmutableArray<string> CategoriesRequiringBlastRadius = default)
{
    /// <summary>
    /// Gets the categories requiring blast-radius annotations.
    /// </summary>
    public ImmutableArray<string> CategoriesRequiringBlastRadius { get; init; } =
        CategoriesRequiringBlastRadius.IsDefaultOrEmpty
            ? [TestCategories.Integration, TestCategories.Contract, TestCategories.Security]
            : CategoriesRequiringBlastRadius;
}

/// <summary>
/// Result of blast-radius validation.
/// </summary>
/// <param name="IsValid">Whether all tests pass validation.</param>
/// <param name="Violations">List of violations found.</param>
/// <param name="TotalTestClasses">Total number of test classes examined.</param>
/// <param name="TestClassesRequiringBlastRadius">Number of test classes that require blast-radius.</param>
public sealed record BlastRadiusValidationResult(
    bool IsValid,
    ImmutableArray<BlastRadiusViolation> Violations,
    int TotalTestClasses,
    int TestClassesRequiringBlastRadius);

/// <summary>
/// A blast-radius validation violation.
/// </summary>
/// <param name="TestClass">Test class with violation.</param>
/// <param name="Category">Category requiring blast-radius.</param>
/// <param name="Message">Violation message.</param>
public sealed record BlastRadiusViolation(
    string TestClass,
    string Category,
    string Message);

/// <summary>
/// Coverage report by blast radius.
/// </summary>
/// <param name="ByBlastRadius">Test classes grouped by blast radius.</param>
/// <param name="UncategorizedTestClasses">Test classes without blast-radius annotation.</param>
/// <param name="TotalTestClasses">Total number of test classes.</param>
public sealed record BlastRadiusCoverageReport(
    ImmutableDictionary<string, ImmutableArray<string>> ByBlastRadius,
    ImmutableArray<string> UncategorizedTestClasses,
    int TotalTestClasses);
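
A minimal usage sketch (editor's addition, not part of the commit): a meta-test that
fails the suite when a category requiring blast radius lacks the annotation. The anchor
type used to locate the assembly is hypothetical, and the usual Xunit/System.Linq usings
are assumed.

public sealed class BlastRadiusPolicyTests
{
    [Fact]
    [Trait("Category", "Unit")]
    public void AnnotatedCategories_CarryBlastRadiusTraits()
    {
        var validator = BlastRadiusValidator.FromAssemblies(
            [typeof(BlastRadiusPolicyTests).Assembly]);

        var result = validator.Validate();

        // Surface every violation message when the assertion fails.
        Assert.True(
            result.IsValid,
            string.Join(Environment.NewLine, result.Violations.Select(v => v.Message)));
    }
}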
@@ -128,4 +128,94 @@ public static class TestCategories
    /// Storage migration tests: Schema migrations, versioning, idempotent migration application.
    /// </summary>
    public const string StorageMigration = "StorageMigration";

    // =========================================================================
    // Blast-Radius annotations - operational surfaces affected by test failures
    // Use these to enable targeted test runs during incidents
    // =========================================================================

    /// <summary>
    /// Blast-radius annotations for operational surfaces.
    /// </summary>
    /// <remarks>
    /// Usage with xUnit:
    /// <code>
    /// [Fact]
    /// [Trait("Category", TestCategories.Integration)]
    /// [Trait("BlastRadius", TestCategories.BlastRadius.Auth)]
    /// [Trait("BlastRadius", TestCategories.BlastRadius.Api)]
    /// public async Task TestTokenValidation() { }
    /// </code>
    ///
    /// Filter by blast radius during test runs:
    /// <code>
    /// dotnet test --filter "BlastRadius=Auth|BlastRadius=Api"
    /// </code>
    /// </remarks>
    public static class BlastRadius
    {
        /// <summary>
        /// Authentication, authorization, identity, tokens, sessions.
        /// </summary>
        public const string Auth = "Auth";

        /// <summary>
        /// SBOM generation, vulnerability scanning, reachability analysis.
        /// </summary>
        public const string Scanning = "Scanning";

        /// <summary>
        /// Attestation, evidence storage, audit trails, proof chains.
        /// </summary>
        public const string Evidence = "Evidence";

        /// <summary>
        /// Regulatory compliance, GDPR, data retention, audit logging.
        /// </summary>
        public const string Compliance = "Compliance";

        /// <summary>
        /// Advisory ingestion, VEX processing, feed synchronization.
        /// </summary>
        public const string Advisories = "Advisories";

        /// <summary>
        /// Risk scoring, policy evaluation, verdicts.
        /// </summary>
        public const string RiskPolicy = "RiskPolicy";

        /// <summary>
        /// Cryptographic operations, signing, verification, key management.
        /// </summary>
        public const string Crypto = "Crypto";

        /// <summary>
        /// External integrations, webhooks, notifications.
        /// </summary>
        public const string Integrations = "Integrations";

        /// <summary>
        /// Data persistence, database operations, storage.
        /// </summary>
        public const string Persistence = "Persistence";

        /// <summary>
        /// API surface, contract compatibility, endpoint behavior.
        /// </summary>
        public const string Api = "Api";
    }

    // =========================================================================
    // Schema evolution categories
    // =========================================================================

    /// <summary>
    /// Schema evolution tests: Backward/forward compatibility across schema versions.
    /// </summary>
    public const string SchemaEvolution = "SchemaEvolution";

    /// <summary>
    /// Config-diff tests: Behavioral delta tests for configuration changes.
    /// </summary>
    public const string ConfigDiff = "ConfigDiff";
}
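
Editor's note: BlastRadiusValidator (earlier in this commit) reads traits from the test
class, not from individual methods, so the annotation pattern that satisfies validation
is class-scoped. A minimal sketch, with a hypothetical test class:

[Trait("Category", TestCategories.Integration)]
[Trait("BlastRadius", TestCategories.BlastRadius.Persistence)]
public sealed class MongoRepositoryTests
{
    [Fact]
    public void Roundtrip_PersistsDocument() { /* ... */ }
}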
@@ -47,6 +47,17 @@ public interface IVerdictBuilder
        string fromCgs,
        string toCgs,
        CancellationToken ct = default);

    /// <summary>
    /// Replay a verdict from bundle inputs (frozen files).
    /// Used by CLI verify --bundle command for deterministic replay.
    /// </summary>
    /// <param name="request">Request containing paths to frozen inputs.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Replay result with computed verdict hash.</returns>
    ValueTask<VerdictReplayResult> ReplayFromBundleAsync(
        VerdictReplayRequest request,
        CancellationToken ct = default);
}

/// <summary>
@@ -160,3 +171,76 @@ public enum CgsVerdictStatus
    Fixed,
    UnderInvestigation
}

/// <summary>
/// Request for replaying a verdict from a replay bundle.
/// Used by CLI verify --bundle command.
/// </summary>
public sealed record VerdictReplayRequest
{
    /// <summary>
    /// Path to the SBOM file in the bundle.
    /// </summary>
    public required string SbomPath { get; init; }

    /// <summary>
    /// Path to the feeds snapshot directory in the bundle (optional).
    /// </summary>
    public string? FeedsPath { get; init; }

    /// <summary>
    /// Path to the VEX documents directory in the bundle (optional).
    /// </summary>
    public string? VexPath { get; init; }

    /// <summary>
    /// Path to the policy bundle in the bundle (optional).
    /// </summary>
    public string? PolicyPath { get; init; }

    /// <summary>
    /// Image digest (sha256:...) being evaluated.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Policy version digest for determinism.
    /// </summary>
    public required string PolicyDigest { get; init; }

    /// <summary>
    /// Feed snapshot digest for determinism.
    /// </summary>
    public required string FeedSnapshotDigest { get; init; }
}

/// <summary>
/// Result of a bundle-based verdict replay.
/// </summary>
public sealed record VerdictReplayResult
{
    /// <summary>
    /// Whether the replay completed successfully.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Computed verdict hash from replay.
    /// </summary>
    public string? VerdictHash { get; init; }

    /// <summary>
    /// Error message if replay failed.
    /// </summary>
    public string? Error { get; init; }

    /// <summary>
    /// Duration of replay in milliseconds.
    /// </summary>
    public long DurationMs { get; init; }

    /// <summary>
    /// Engine version that performed the replay.
    /// </summary>
    public string? EngineVersion { get; init; }
}
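
A hedged consumption sketch (editor's addition): how a verify --bundle code path might
drive ReplayFromBundleAsync and compare the recomputed hash against an expected value.
The bundle layout mirrors the test fixtures later in this commit; the BundleManifest
type carrying the frozen digests is a hypothetical assumption, not shown here.

public static async Task<int> VerifyBundleAsync(
    IVerdictBuilder builder,
    string bundleDir,
    string expectedVerdictHash,
    BundleManifest manifest, // hypothetical: carries the frozen digests
    CancellationToken ct)
{
    var result = await builder.ReplayFromBundleAsync(
        new VerdictReplayRequest
        {
            SbomPath = Path.Combine(bundleDir, "inputs", "sbom.json"),
            VexPath = Path.Combine(bundleDir, "inputs", "vex"),
            PolicyPath = Path.Combine(bundleDir, "inputs", "policy", "policy-lock.json"),
            ImageDigest = manifest.ImageDigest,
            PolicyDigest = manifest.PolicyDigest,
            FeedSnapshotDigest = manifest.FeedSnapshotDigest
        },
        ct);

    if (!result.Success)
    {
        Console.Error.WriteLine($"replay failed: {result.Error}");
        return 2;
    }

    // Ordinal comparison: verdict hashes are opaque identifiers.
    return string.Equals(result.VerdictHash, expectedVerdictHash, StringComparison.Ordinal) ? 0 : 1;
}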
@@ -121,6 +121,140 @@ public sealed class VerdictBuilderService : IVerdictBuilder
        );
    }

    /// <inheritdoc/>
    public async ValueTask<VerdictReplayResult> ReplayFromBundleAsync(
        VerdictReplayRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        var sw = System.Diagnostics.Stopwatch.StartNew();
        const string engineVersion = "1.0.0";

        try
        {
            _logger.LogInformation(
                "Starting bundle replay for image={ImageDigest}, policy={PolicyDigest}",
                request.ImageDigest,
                request.PolicyDigest);

            // 1. Load and validate SBOM
            if (!File.Exists(request.SbomPath))
            {
                return new VerdictReplayResult
                {
                    Success = false,
                    Error = $"SBOM file not found: {request.SbomPath}",
                    DurationMs = sw.ElapsedMilliseconds,
                    EngineVersion = engineVersion
                };
            }

            var sbomContent = await File.ReadAllTextAsync(request.SbomPath, ct).ConfigureAwait(false);

            // 2. Load VEX documents if present
            var vexDocuments = new List<string>();
            if (!string.IsNullOrEmpty(request.VexPath) && Directory.Exists(request.VexPath))
            {
                foreach (var vexFile in Directory.GetFiles(request.VexPath, "*.json", SearchOption.AllDirectories)
                    .OrderBy(f => f, StringComparer.Ordinal))
                {
                    ct.ThrowIfCancellationRequested();
                    var vexContent = await File.ReadAllTextAsync(vexFile, ct).ConfigureAwait(false);
                    vexDocuments.Add(vexContent);
                }

                _logger.LogDebug("Loaded {VexCount} VEX documents", vexDocuments.Count);
            }

            // 3. Load reachability graph if present
            string? reachabilityJson = null;
            var reachPath = Path.Combine(Path.GetDirectoryName(request.SbomPath) ?? string.Empty, "reachability.json");
            if (File.Exists(reachPath))
            {
                reachabilityJson = await File.ReadAllTextAsync(reachPath, ct).ConfigureAwait(false);
                _logger.LogDebug("Loaded reachability graph");
            }

            // 4. Build evidence pack
            var evidencePack = new EvidencePack(
                SbomCanonJson: sbomContent,
                VexCanonJson: vexDocuments,
                ReachabilityGraphJson: reachabilityJson,
                FeedSnapshotDigest: request.FeedSnapshotDigest);

            // 5. Build policy lock from bundle
            var policyLock = await LoadPolicyLockAsync(request.PolicyPath, request.PolicyDigest, ct)
                .ConfigureAwait(false);

            // 6. Compute verdict
            var result = await BuildAsync(evidencePack, policyLock, ct).ConfigureAwait(false);

            sw.Stop();

            _logger.LogInformation(
                "Bundle replay completed: cgs={CgsHash}, duration={DurationMs}ms",
                result.CgsHash,
                sw.ElapsedMilliseconds);

            return new VerdictReplayResult
            {
                Success = true,
                VerdictHash = result.CgsHash,
                DurationMs = sw.ElapsedMilliseconds,
                EngineVersion = engineVersion
            };
        }
        catch (OperationCanceledException)
        {
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Bundle replay failed");
            sw.Stop();

            return new VerdictReplayResult
            {
                Success = false,
                Error = ex.Message,
                DurationMs = sw.ElapsedMilliseconds,
                EngineVersion = engineVersion
            };
        }
    }

    /// <summary>
    /// Load or generate policy lock from bundle.
    /// </summary>
    private static async ValueTask<PolicyLock> LoadPolicyLockAsync(
        string? policyPath,
        string policyDigest,
        CancellationToken ct)
    {
        if (!string.IsNullOrEmpty(policyPath) && File.Exists(policyPath))
        {
            var policyJson = await File.ReadAllTextAsync(policyPath, ct).ConfigureAwait(false);
            var loaded = JsonSerializer.Deserialize<PolicyLock>(policyJson, CanonicalJsonOptions);
            if (loaded is not null)
            {
                return loaded;
            }
        }

        // Default policy lock when not present in bundle
        return new PolicyLock(
            SchemaVersion: "1.0.0",
            PolicyVersion: policyDigest,
            RuleHashes: new Dictionary<string, string>
            {
                ["default"] = policyDigest
            },
            EngineVersion: "1.0.0",
            GeneratedAt: DateTimeOffset.UtcNow
        );
    }

    /// <summary>
    /// Compute CGS hash using deterministic Merkle tree.
    /// </summary>
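
Editor's note: the StringComparer.Ordinal sort over VEX file paths above is what keeps
the replay deterministic, since Directory.GetFiles makes no ordering guarantee and
culture-sensitive sorts differ between hosts. A minimal sketch of the same idea in
isolation, assuming the usual implicit usings:

public static IReadOnlyList<string> ReadJsonFilesDeterministically(string directory)
{
    // Byte-wise (ordinal) path ordering is locale-independent, so two machines
    // enumerating the same bundle always feed documents in the same order.
    return Directory.GetFiles(directory, "*.json", SearchOption.AllDirectories)
        .OrderBy(f => f, StringComparer.Ordinal)
        .Select(File.ReadAllText)
        .ToList();
}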
@@ -22,7 +22,8 @@ public class DpopProofValidatorTests
            new { typ = 123, alg = "ES256" },
            new { htm = "GET", htu = "https://api.test/resource", iat = 0, jti = "1" });

        var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
        var validator = CreateValidator(now);
        var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));

        Assert.False(result.IsValid);
@@ -37,7 +38,8 @@ public class DpopProofValidatorTests
            new { typ = "dpop+jwt", alg = 55 },
            new { htm = "GET", htu = "https://api.test/resource", iat = 0, jti = "1" });

        var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
        var validator = CreateValidator(now);
        var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));

        Assert.False(result.IsValid);
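
Editor's sketch: the change above threads a fixed timestamp into the validator so that
iat/expiry checks are deterministic. CreateValidator's body is not part of this diff;
one plausible shape, where both the DpopProofValidator constructor signature and the
FakeTimeProvider wiring are assumptions:

private static DpopProofValidator CreateValidator(DateTimeOffset? now = null)
{
    TimeProvider clock = now is null
        ? TimeProvider.System
        : new FakeTimeProvider(now.Value); // Microsoft.Extensions.Time.Testing

    return new DpopProofValidator(clock); // constructor shape is an assumption
}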
@@ -0,0 +1,269 @@
// <copyright file="VerdictBuilderReplayTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under AGPL-3.0-or-later.
// </copyright>

using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Xunit;

namespace StellaOps.Verdict.Tests;

/// <summary>
/// Tests for VerdictBuilderService.ReplayFromBundleAsync.
/// RPL-005: Unit tests for VerdictBuilder replay with fixtures.
/// </summary>
[Trait("Category", "Unit")]
public sealed class VerdictBuilderReplayTests : IDisposable
{
    private readonly VerdictBuilderService _verdictBuilder;
    private readonly string _testDir;

    public VerdictBuilderReplayTests()
    {
        _verdictBuilder = new VerdictBuilderService(
            NullLoggerFactory.Instance.CreateLogger<VerdictBuilderService>(),
            signer: null);
        _testDir = Path.Combine(Path.GetTempPath(), $"verdict-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_testDir);
    }

    public void Dispose()
    {
        if (Directory.Exists(_testDir))
        {
            Directory.Delete(_testDir, recursive: true);
        }
    }

    #region Helper Methods

    private void CreateFile(string relativePath, string content)
    {
        var fullPath = Path.Combine(_testDir, relativePath.TrimStart('/'));
        var dir = Path.GetDirectoryName(fullPath);
        if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir))
        {
            Directory.CreateDirectory(dir);
        }

        File.WriteAllText(fullPath, content, Encoding.UTF8);
    }

    private string GetPath(string relativePath) => Path.Combine(_testDir, relativePath.TrimStart('/'));

    #endregion

    #region ReplayFromBundleAsync Tests

    [Fact]
    public async Task ReplayFromBundleAsync_MissingSbom_ReturnsFailure()
    {
        // Arrange
        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act
        var result = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);

        // Assert
        result.Success.Should().BeFalse();
        result.Error.Should().Contain("SBOM file not found");
    }

    [Fact]
    public async Task ReplayFromBundleAsync_ValidSbom_ReturnsSuccess()
    {
        // Arrange
        var sbomJson = """
            {
              "bomFormat": "CycloneDX",
              "specVersion": "1.6",
              "version": 1,
              "components": []
            }
            """;
        CreateFile("inputs/sbom.json", sbomJson);

        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act
        var result = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);

        // Assert
        result.Success.Should().BeTrue();
        result.VerdictHash.Should().NotBeNullOrEmpty();
        result.VerdictHash.Should().StartWith("cgs:sha256:");
        result.EngineVersion.Should().Be("1.0.0");
        result.DurationMs.Should().BeGreaterOrEqualTo(0);
    }

    [Fact]
    public async Task ReplayFromBundleAsync_WithVexDocuments_LoadsVexFiles()
    {
        // Arrange
        var sbomJson = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":1,"components":[]}""";
        var vex1Json = """{"@context":"https://openvex.dev/ns/v0.2.0","@id":"test-vex-1","statements":[]}""";
        var vex2Json = """{"@context":"https://openvex.dev/ns/v0.2.0","@id":"test-vex-2","statements":[]}""";

        CreateFile("inputs/sbom.json", sbomJson);
        CreateFile("inputs/vex/vex1.json", vex1Json);
        CreateFile("inputs/vex/vex2.json", vex2Json);

        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            VexPath = GetPath("inputs/vex"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act
        var result = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);

        // Assert
        result.Success.Should().BeTrue();
        result.VerdictHash.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task ReplayFromBundleAsync_DeterministicHash_SameInputsProduceSameHash()
    {
        // Arrange
        var sbomJson = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":1,"components":[]}""";
        CreateFile("inputs/sbom.json", sbomJson);

        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act - replay twice with same inputs
        var result1 = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);
        var result2 = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);

        // Assert - should produce identical hash
        result1.Success.Should().BeTrue();
        result2.Success.Should().BeTrue();
        result1.VerdictHash.Should().Be(result2.VerdictHash);
    }

    [Fact]
    public async Task ReplayFromBundleAsync_DifferentInputs_ProduceDifferentHash()
    {
        // Arrange
        var sbom1 = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":1,"components":[]}""";
        var sbom2 = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":2,"components":[]}""";

        CreateFile("inputs/sbom1.json", sbom1);
        CreateFile("inputs/sbom2.json", sbom2);

        var request1 = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom1.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        var request2 = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom2.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act
        var result1 = await _verdictBuilder.ReplayFromBundleAsync(request1, TestContext.Current.CancellationToken);
        var result2 = await _verdictBuilder.ReplayFromBundleAsync(request2, TestContext.Current.CancellationToken);

        // Assert
        result1.Success.Should().BeTrue();
        result2.Success.Should().BeTrue();
        result1.VerdictHash.Should().NotBe(result2.VerdictHash);
    }

    [Fact]
    public async Task ReplayFromBundleAsync_WithPolicyLock_LoadsPolicy()
    {
        // Arrange
        var sbomJson = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":1,"components":[]}""";
        var policyJson = """
            {
              "SchemaVersion": "1.0.0",
              "PolicyVersion": "custom-policy-v1",
              "RuleHashes": {"critical-rule": "sha256:abc"},
              "EngineVersion": "1.0.0",
              "GeneratedAt": "2026-01-06T00:00:00Z"
            }
            """;

        CreateFile("inputs/sbom.json", sbomJson);
        CreateFile("inputs/policy/policy-lock.json", policyJson);

        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            PolicyPath = GetPath("inputs/policy/policy-lock.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        // Act
        var result = await _verdictBuilder.ReplayFromBundleAsync(request, TestContext.Current.CancellationToken);

        // Assert
        result.Success.Should().BeTrue();
        result.VerdictHash.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task ReplayFromBundleAsync_CancellationRequested_ThrowsOperationCanceledException()
    {
        // Arrange
        var sbomJson = """{"bomFormat":"CycloneDX","specVersion":"1.6","version":1,"components":[]}""";
        CreateFile("inputs/sbom.json", sbomJson);

        var request = new VerdictReplayRequest
        {
            SbomPath = GetPath("inputs/sbom.json"),
            ImageDigest = "sha256:abc123",
            PolicyDigest = "sha256:policy123",
            FeedSnapshotDigest = "sha256:feeds123"
        };

        using var cts = new CancellationTokenSource();
        cts.Cancel();

        // Act & Assert
        // ThrowsAnyAsync: the pre-canceled token surfaces as TaskCanceledException
        // (a subtype of OperationCanceledException), which exact-match ThrowsAsync rejects.
        await Assert.ThrowsAnyAsync<OperationCanceledException>(
            () => _verdictBuilder.ReplayFromBundleAsync(request, cts.Token).AsTask());
    }

    [Fact]
    public async Task ReplayFromBundleAsync_NullRequest_ThrowsArgumentNullException()
    {
        // Act & Assert
        await Assert.ThrowsAsync<ArgumentNullException>(
            () => _verdictBuilder.ReplayFromBundleAsync(null!, TestContext.Current.CancellationToken).AsTask());
    }

    #endregion
}