Today's product advisories implemented
@@ -0,0 +1,952 @@
// -----------------------------------------------------------------------------
// DeterminismReplayGoldenTests.cs
// Sprint: SPRINT_20260117_014_CLI_determinism_replay
// Task: DRP-004 - Golden file tests for replay verification
// Description: Golden output tests for HLC, Timeline, and Score Explain commands
// -----------------------------------------------------------------------------

using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using FluentAssertions;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.Cli.Tests.GoldenOutput;

/// <summary>
/// Golden output tests for determinism and replay CLI commands.
/// Verifies that HLC status, timeline query, and score explain
/// produce consistent, deterministic outputs matching frozen snapshots.
/// Task: DRP-004
///
/// HOW TO UPDATE GOLDEN FILES:
/// 1. Run tests to identify failures
/// 2. Review the actual output carefully to ensure changes are intentional
/// 3. Update the expected golden snapshot in this file
/// 4. Document the reason for the change in the commit message
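/// Example invocation (illustrative; adjust the filter to your test setup):
///   dotnet test --filter "Category=GoldenOutput"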
/// </summary>
[Trait("Category", TestCategories.Unit)]
[Trait("Category", "GoldenOutput")]
[Trait("Category", "Determinism")]
[Trait("Sprint", "20260117-014")]
public sealed class DeterminismReplayGoldenTests
{
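    // Shared serializer options: indented output, camelCase names, nulls omitted, so snapshots stay stable and easy to diff.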
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    // Fixed timestamp for deterministic tests
    private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 15, 10, 30, 0, TimeSpan.Zero);

    #region HLC Status Golden Tests (DRP-001)

    /// <summary>
    /// Verifies that HLC status JSON output matches golden snapshot.
    /// </summary>
    [Fact]
    public void HlcStatus_Json_MatchesGolden()
    {
        // Arrange
        var status = CreateFrozenHlcStatus();

        // Act
        var actual = JsonSerializer.Serialize(status, JsonOptions).NormalizeLf();

        // Assert - Golden snapshot
        var expected = """
            {
              "nodeId": "node-01",
              "healthy": true,
              "currentTimestamp": {
                "physical": 1736937000000,
                "logical": 42,
                "nodeId": "node-01"
              },
              "formattedTimestamp": "2026-01-15T10:30:00.000Z:0042:node-01",
              "clockDriftMs": 3.2,
              "ntpServer": "time.google.com",
              "lastNtpSync": "2026-01-15T10:25:00+00:00",
              "clusterState": {
                "totalNodes": 3,
                "syncedNodes": 3,
                "peers": [
                  {
                    "nodeId": "node-01",
                    "status": "synced",
                    "lastSeen": "2026-01-15T10:30:00+00:00",
                    "driftMs": 0
                  },
                  {
                    "nodeId": "node-02",
                    "status": "synced",
                    "lastSeen": "2026-01-15T10:29:58+00:00",
                    "driftMs": 1.5
                  },
                  {
                    "nodeId": "node-03",
                    "status": "synced",
                    "lastSeen": "2026-01-15T10:29:55+00:00",
                    "driftMs": 2.8
                  }
                ]
              },
              "checkedAt": "2026-01-15T10:30:00+00:00"
            }
            """.NormalizeLf();

        actual.Should().Be(expected);
    }

    /// <summary>
    /// Verifies that HLC status text output matches golden snapshot.
    /// </summary>
    [Fact]
    public void HlcStatus_Text_MatchesGolden()
    {
        // Arrange
        var status = CreateFrozenHlcStatus();

        // Act
        var actual = FormatHlcStatusText(status, verbose: false).NormalizeLf();

        // Assert - Golden snapshot
        var expected = """
            HLC Node Status
            ===============

            Health: [OK] Healthy
            Node ID: node-01
            HLC Timestamp: 2026-01-15T10:30:00.000Z:0042:node-01
            Clock Drift: 3.2 ms
            NTP Server: time.google.com
            Last NTP Sync: 2026-01-15 10:25:00Z

            Cluster State:
              Nodes: 3/3 synced

            Checked At: 2026-01-15 10:30:00Z
            """.NormalizeLf();

        actual.Trim().Should().Be(expected.Trim());
    }

    /// <summary>
    /// Verifies that HLC status verbose text output matches golden snapshot.
    /// </summary>
    [Fact]
    public void HlcStatus_TextVerbose_MatchesGolden()
    {
        // Arrange
        var status = CreateFrozenHlcStatus();

        // Act
        var actual = FormatHlcStatusText(status, verbose: true).NormalizeLf();

        // Assert - Should contain peer table
        actual.Should().Contain("Peer Status:");
        actual.Should().Contain("node-01");
        actual.Should().Contain("node-02");
        actual.Should().Contain("node-03");
        actual.Should().Contain("synced");
    }

    /// <summary>
    /// Verifies that HLC status produces consistent output across multiple runs.
    /// </summary>
    [Fact]
    public void HlcStatus_SameInputs_ProducesIdenticalOutput()
    {
        // Arrange
        var status1 = CreateFrozenHlcStatus();
        var status2 = CreateFrozenHlcStatus();

        // Act
        var json1 = JsonSerializer.Serialize(status1, JsonOptions);
        var json2 = JsonSerializer.Serialize(status2, JsonOptions);

        // Assert
        json1.Should().Be(json2);
    }

    #endregion

    #region Timeline Query Golden Tests (DRP-002)

    /// <summary>
    /// Verifies that timeline query JSON output matches golden snapshot.
    /// </summary>
    [Fact]
    public void TimelineQuery_Json_MatchesGolden()
    {
        // Arrange
        var result = CreateFrozenTimelineResult();

        // Act
        var actual = JsonSerializer.Serialize(result, JsonOptions).NormalizeLf();

        // Assert - Golden snapshot
        var expected = """
            {
              "events": [
                {
                  "hlcTimestamp": "1737000000000000001",
                  "type": "scan",
                  "entityId": "sha256:abc123def456",
                  "actor": "scanner-agent-1",
                  "details": "SBOM generated"
                },
                {
                  "hlcTimestamp": "1737000000000000002",
                  "type": "attest",
                  "entityId": "sha256:abc123def456",
                  "actor": "attestor-1",
                  "details": "SLSA provenance created"
                },
                {
                  "hlcTimestamp": "1737000000000000003",
                  "type": "policy",
                  "entityId": "sha256:abc123def456",
                  "actor": "policy-engine",
                  "details": "Policy evaluation: PASS"
                },
                {
                  "hlcTimestamp": "1737000000000000004",
                  "type": "promote",
                  "entityId": "release-2026.01.15-001",
                  "actor": "ops@example.com",
                  "details": "Promoted from dev to stage"
                }
              ],
              "pagination": {
                "offset": 0,
                "limit": 50,
                "total": 4,
                "hasMore": false
              },
              "determinismHash": "sha256:a1b2c3d4e5f67890"
            }
            """.NormalizeLf();

        actual.Should().Be(expected);
    }

    /// <summary>
    /// Verifies that timeline query table output matches golden snapshot.
    /// </summary>
    [Fact]
    public void TimelineQuery_Table_MatchesGolden()
    {
        // Arrange
        var events = CreateFrozenTimelineEvents();

        // Act
        var actual = FormatTimelineTable(events).NormalizeLf();

        // Assert - Golden snapshot header
        actual.Should().Contain("Timeline Events");
        actual.Should().Contain("HLC Timestamp");
        actual.Should().Contain("Type");
        actual.Should().Contain("Entity");
        actual.Should().Contain("Actor");

        // Events should appear in HLC timestamp order
        var scanIndex = actual.IndexOf("scan");
        var attestIndex = actual.IndexOf("attest");
        var policyIndex = actual.IndexOf("policy");
        var promoteIndex = actual.IndexOf("promote");

        scanIndex.Should().BeLessThan(attestIndex);
        attestIndex.Should().BeLessThan(policyIndex);
        policyIndex.Should().BeLessThan(promoteIndex);
    }

    /// <summary>
    /// Verifies that timeline events are sorted by HLC timestamp.
    /// </summary>
    [Fact]
    public void TimelineQuery_EventsAreSortedByHlcTimestamp()
    {
        // Arrange - Events in random order
        var events = new List<TimelineEvent>
        {
            new() { HlcTimestamp = "1737000000000000004", Type = "promote", EntityId = "release-001", Actor = "ops", Details = "Promoted" },
            new() { HlcTimestamp = "1737000000000000001", Type = "scan", EntityId = "sha256:abc", Actor = "scanner", Details = "Scanned" },
            new() { HlcTimestamp = "1737000000000000003", Type = "policy", EntityId = "sha256:abc", Actor = "policy", Details = "Evaluated" },
            new() { HlcTimestamp = "1737000000000000002", Type = "attest", EntityId = "sha256:abc", Actor = "attestor", Details = "Attested" }
        };

        // Act - Sort as timeline query would
        var sorted = events.OrderBy(e => e.HlcTimestamp).ToList();

        // Assert - Events should be in ascending HLC timestamp order
        sorted[0].Type.Should().Be("scan");
        sorted[1].Type.Should().Be("attest");
        sorted[2].Type.Should().Be("policy");
        sorted[3].Type.Should().Be("promote");
    }

    /// <summary>
    /// Verifies that timeline determinism hash is consistent.
    /// </summary>
    [Fact]
    public void TimelineQuery_DeterminismHashIsConsistent()
    {
        // Arrange
        var events1 = CreateFrozenTimelineEvents();
        var events2 = CreateFrozenTimelineEvents();

        // Act
        var hash1 = ComputeTimelineDeterminismHash(events1);
        var hash2 = ComputeTimelineDeterminismHash(events2);

        // Assert
        hash1.Should().Be(hash2);
        hash1.Should().StartWith("sha256:");
    }

    #endregion

    #region Score Explain Golden Tests (DRP-003)

    /// <summary>
    /// Verifies that score explain JSON output matches golden snapshot.
    /// </summary>
    [Fact]
    public void ScoreExplain_Json_MatchesGolden()
    {
        // Arrange
        var explanation = CreateFrozenScoreExplanation();
        EnsureScoreExplanationDeterminism(explanation);

        // Act
        var actual = JsonSerializer.Serialize(explanation, JsonOptions).NormalizeLf();

        // Assert - Golden snapshot
        var expected = """
            {
              "digest": "sha256:abc123def456789012345678901234567890123456789012345678901234",
              "finalScore": 7.500000,
              "scoreBreakdown": {
                "baseScore": 8.100000,
                "cvssScore": 8.100000,
                "epssAdjustment": -0.300000,
                "reachabilityAdjustment": -0.200000,
                "vexAdjustment": -0.100000,
                "factors": [
                  {
                    "name": "CVSS Base Score",
                    "value": 8.100000,
                    "weight": 0.400000,
                    "contribution": 3.240000,
                    "source": "NVD",
                    "details": "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N"
                  },
                  {
                    "name": "EPSS Probability",
                    "value": 0.150000,
                    "weight": 0.200000,
                    "contribution": 1.500000,
                    "source": "FIRST EPSS",
                    "details": "15th percentile exploitation probability"
                  },
                  {
                    "name": "KEV Status",
                    "value": 0.000000,
                    "weight": 0.050000,
                    "contribution": 0.000000,
                    "source": "CISA KEV",
                    "details": "Not in Known Exploited Vulnerabilities catalog"
                  },
                  {
                    "name": "Reachability",
                    "value": 0.700000,
                    "weight": 0.250000,
                    "contribution": 1.750000,
                    "source": "Static Analysis",
                    "details": "Reachable via 2 call paths; confidence 0.7"
                  },
                  {
                    "name": "VEX Status",
                    "value": 0.000000,
                    "weight": 0.100000,
                    "contribution": 0.000000,
                    "source": "OpenVEX",
                    "details": "No VEX statement available"
                  }
                ]
              },
              "computedAt": "2026-01-15T10:30:00+00:00",
              "profileUsed": "stella-default-v1",
              "determinismHash": "sha256:b3c4d5e6f7a89012"
            }
            """.NormalizeLf();

        actual.Should().Be(expected);
    }

    /// <summary>
    /// Verifies that score explain factors are sorted alphabetically.
    /// </summary>
    [Fact]
    public void ScoreExplain_FactorsAreSortedAlphabetically()
    {
        // Arrange - Create explanation with unsorted factors
        var explanation = CreateFrozenScoreExplanation();

        // Act
        EnsureScoreExplanationDeterminism(explanation);

        // Assert - Factors should be sorted by name
        var factorNames = explanation.ScoreBreakdown.Factors.Select(f => f.Name).ToList();
        factorNames.Should().BeInAscendingOrder();
    }

    /// <summary>
    /// Verifies that floating-point values have stable 6-decimal precision.
    /// </summary>
    [Fact]
    public void ScoreExplain_FloatingPointValuesHaveStablePrecision()
    {
        // Arrange
        var explanation = CreateFrozenScoreExplanation();
        EnsureScoreExplanationDeterminism(explanation);

        // Act
        var json = JsonSerializer.Serialize(explanation, JsonOptions);

        // Assert - Values should have 6 decimal places
        json.Should().Contain("7.500000");
        json.Should().Contain("8.100000");
        json.Should().Contain("-0.300000");
        json.Should().Contain("-0.200000");
        json.Should().Contain("-0.100000");
    }

    /// <summary>
    /// Verifies that score explain determinism hash is consistent.
    /// </summary>
    [Fact]
    public void ScoreExplain_DeterminismHashIsConsistent()
    {
        // Arrange
        var exp1 = CreateFrozenScoreExplanation();
        var exp2 = CreateFrozenScoreExplanation();

        // Act
        EnsureScoreExplanationDeterminism(exp1);
        EnsureScoreExplanationDeterminism(exp2);

        // Assert
        exp1.DeterminismHash.Should().Be(exp2.DeterminismHash);
        exp1.DeterminismHash.Should().StartWith("sha256:");
        exp1.DeterminismHash.Should().HaveLength(24); // "sha256:" + 16 hex chars
    }

    /// <summary>
    /// Verifies that same inputs produce identical outputs (byte-for-byte).
    /// </summary>
    [Fact]
    public void ScoreExplain_SameInputs_ProducesIdenticalOutput()
    {
        // Arrange
        var exp1 = CreateFrozenScoreExplanation();
        var exp2 = CreateFrozenScoreExplanation();

        // Act
        EnsureScoreExplanationDeterminism(exp1);
        EnsureScoreExplanationDeterminism(exp2);

        var json1 = JsonSerializer.Serialize(exp1, JsonOptions);
        var json2 = JsonSerializer.Serialize(exp2, JsonOptions);

        // Assert
        json1.Should().Be(json2);
    }

    /// <summary>
    /// Verifies that different inputs produce different determinism hashes.
    /// </summary>
    [Fact]
    public void ScoreExplain_DifferentInputs_ProducesDifferentHash()
    {
        // Arrange
        var exp1 = CreateFrozenScoreExplanation();
        var exp2 = CreateFrozenScoreExplanation();
        exp2.FinalScore = 8.0; // Different score

        // Act
        EnsureScoreExplanationDeterminism(exp1);
        EnsureScoreExplanationDeterminism(exp2);

        // Assert
        exp1.DeterminismHash.Should().NotBe(exp2.DeterminismHash);
    }

    #endregion

    #region Cross-Platform Golden Tests

    /// <summary>
    /// Verifies that JSON output uses consistent line endings (LF).
    /// </summary>
    [Fact]
    public void AllOutputs_UseConsistentLineEndings()
    {
        // Arrange
        var hlcStatus = CreateFrozenHlcStatus();
        var timeline = CreateFrozenTimelineResult();
        var score = CreateFrozenScoreExplanation();

        // Act
        var hlcJson = JsonSerializer.Serialize(hlcStatus, JsonOptions);
        var timelineJson = JsonSerializer.Serialize(timeline, JsonOptions);
        var scoreJson = JsonSerializer.Serialize(score, JsonOptions);

        // Assert - Should not contain CRLF
        hlcJson.Should().NotContain("\r\n");
        timelineJson.Should().NotContain("\r\n");
        scoreJson.Should().NotContain("\r\n");
    }

    /// <summary>
    /// Verifies that timestamps use ISO 8601 format with UTC.
    /// </summary>
    [Fact]
    public void AllOutputs_TimestampsAreIso8601Utc()
    {
        // Arrange
        var hlcStatus = CreateFrozenHlcStatus();
        var score = CreateFrozenScoreExplanation();

        // Act
        var hlcJson = JsonSerializer.Serialize(hlcStatus, JsonOptions);
        var scoreJson = JsonSerializer.Serialize(score, JsonOptions);

        // Assert - Timestamps should be ISO 8601 with UTC offset
        hlcJson.Should().MatchRegex(@"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00");
        scoreJson.Should().MatchRegex(@"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\+00:00");
    }

    /// <summary>
    /// Verifies that digests are lowercase hex.
    /// </summary>
    [Fact]
    public void AllOutputs_DigestsAreLowercaseHex()
    {
        // Arrange
        var score = CreateFrozenScoreExplanation();
        EnsureScoreExplanationDeterminism(score);

        // Act
        var json = JsonSerializer.Serialize(score, JsonOptions);

        // Assert - Digests should be lowercase
        json.Should().Contain("sha256:abc123def456");
        json.Should().NotMatchRegex("sha256:[A-F]");
    }

    #endregion

    #region Test Helpers

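    // Frozen HLC fixture: FormattedTimestamp combines the wall-clock instant, the logical counter (42, zero-padded to 0042), and the node id.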
    private static HlcStatus CreateFrozenHlcStatus()
    {
        return new HlcStatus
        {
            NodeId = "node-01",
            Healthy = true,
            CurrentTimestamp = new HlcTimestamp
            {
                Physical = 1736937000000,
                Logical = 42,
                NodeId = "node-01"
            },
            FormattedTimestamp = "2026-01-15T10:30:00.000Z:0042:node-01",
            ClockDriftMs = 3.2,
            NtpServer = "time.google.com",
            LastNtpSync = FixedTimestamp.AddMinutes(-5),
            ClusterState = new HlcClusterState
            {
                TotalNodes = 3,
                SyncedNodes = 3,
                Peers =
                [
                    new HlcPeerStatus { NodeId = "node-01", Status = "synced", LastSeen = FixedTimestamp, DriftMs = 0 },
                    new HlcPeerStatus { NodeId = "node-02", Status = "synced", LastSeen = FixedTimestamp.AddSeconds(-2), DriftMs = 1.5 },
                    new HlcPeerStatus { NodeId = "node-03", Status = "synced", LastSeen = FixedTimestamp.AddSeconds(-5), DriftMs = 2.8 }
                ]
            },
            CheckedAt = FixedTimestamp
        };
    }

    private static string FormatHlcStatusText(HlcStatus status, bool verbose)
    {
        var sb = new StringBuilder();
        sb.AppendLine("HLC Node Status");
        sb.AppendLine("===============");
        sb.AppendLine();

        var healthStatus = status.Healthy ? "[OK] Healthy" : "[FAIL] Unhealthy";
        sb.AppendLine($"Health: {healthStatus}");
        sb.AppendLine($"Node ID: {status.NodeId}");
        sb.AppendLine($"HLC Timestamp: {status.FormattedTimestamp}");
        sb.AppendLine($"Clock Drift: {status.ClockDriftMs} ms");
        sb.AppendLine($"NTP Server: {status.NtpServer}");
        sb.AppendLine($"Last NTP Sync: {status.LastNtpSync:yyyy-MM-dd HH:mm:ssZ}");
        sb.AppendLine();
        sb.AppendLine("Cluster State:");
        sb.AppendLine($"  Nodes: {status.ClusterState.SyncedNodes}/{status.ClusterState.TotalNodes} synced");

        if (verbose && status.ClusterState.Peers.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("Peer Status:");
            foreach (var peer in status.ClusterState.Peers)
            {
                sb.AppendLine($"  {peer.NodeId}: {peer.Status} (drift: {peer.DriftMs} ms)");
            }
        }

        sb.AppendLine();
        sb.AppendLine($"Checked At: {status.CheckedAt:yyyy-MM-dd HH:mm:ssZ}");

        return sb.ToString();
    }

    private static List<TimelineEvent> CreateFrozenTimelineEvents()
    {
        return
        [
            new TimelineEvent { HlcTimestamp = "1737000000000000001", Type = "scan", EntityId = "sha256:abc123def456", Actor = "scanner-agent-1", Details = "SBOM generated" },
            new TimelineEvent { HlcTimestamp = "1737000000000000002", Type = "attest", EntityId = "sha256:abc123def456", Actor = "attestor-1", Details = "SLSA provenance created" },
            new TimelineEvent { HlcTimestamp = "1737000000000000003", Type = "policy", EntityId = "sha256:abc123def456", Actor = "policy-engine", Details = "Policy evaluation: PASS" },
            new TimelineEvent { HlcTimestamp = "1737000000000000004", Type = "promote", EntityId = "release-2026.01.15-001", Actor = "ops@example.com", Details = "Promoted from dev to stage" }
        ];
    }

    private static TimelineQueryResult CreateFrozenTimelineResult()
    {
        var events = CreateFrozenTimelineEvents();
        return new TimelineQueryResult
        {
            Events = events,
            Pagination = new PaginationInfo
            {
                Offset = 0,
                Limit = 50,
                Total = events.Count,
                HasMore = false
            },
            DeterminismHash = ComputeTimelineDeterminismHash(events)
        };
    }

    private static string FormatTimelineTable(List<TimelineEvent> events)
    {
        var sb = new StringBuilder();
        sb.AppendLine("Timeline Events");
        sb.AppendLine("===============");
        sb.AppendLine();
        sb.AppendLine($"{"HLC Timestamp",-28} {"Type",-12} {"Entity",-25} {"Actor"}");
        sb.AppendLine(new string('-', 90));

        foreach (var evt in events.OrderBy(e => e.HlcTimestamp))
        {
            var entityTrunc = evt.EntityId.Length > 23 ? evt.EntityId[..23] + ".." : evt.EntityId;
            sb.AppendLine($"{evt.HlcTimestamp,-28} {evt.Type,-12} {entityTrunc,-25} {evt.Actor}");
        }

        sb.AppendLine();
        sb.AppendLine($"Total: {events.Count} events");

        return sb.ToString();
    }

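    // Hash format: "sha256:" plus the first 16 lowercase hex characters of the SHA-256 digest (24 characters total, matching the HaveLength(24) assertion above).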
    private static string ComputeTimelineDeterminismHash(IEnumerable<TimelineEvent> events)
    {
        var combined = string.Join("|", events.OrderBy(e => e.HlcTimestamp).Select(e => $"{e.HlcTimestamp}:{e.Type}:{e.EntityId}"));
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
        return $"sha256:{Convert.ToHexStringLower(hash)[..16]}";
    }

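    // Frozen score fixture; the breakdown is self-consistent: 8.1 (base) - 0.3 (EPSS) - 0.2 (reachability) - 0.1 (VEX) = 7.5 (final score).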
    private static ScoreExplanation CreateFrozenScoreExplanation()
    {
        return new ScoreExplanation
        {
            Digest = "sha256:abc123def456789012345678901234567890123456789012345678901234",
            FinalScore = 7.5,
            ScoreBreakdown = new ScoreBreakdown
            {
                BaseScore = 8.1,
                CvssScore = 8.1,
                EpssAdjustment = -0.3,
                ReachabilityAdjustment = -0.2,
                VexAdjustment = -0.1,
                Factors =
                [
                    new ScoreFactor
                    {
                        Name = "CVSS Base Score",
                        Value = 8.1,
                        Weight = 0.4,
                        Contribution = 3.24,
                        Source = "NVD",
                        Details = "CVSS:4.0/AV:N/AC:L/AT:N/PR:N/UI:N/VC:H/VI:H/VA:H/SC:N/SI:N/SA:N"
                    },
                    new ScoreFactor
                    {
                        Name = "EPSS Probability",
                        Value = 0.15,
                        Weight = 0.2,
                        Contribution = 1.5,
                        Source = "FIRST EPSS",
                        Details = "15th percentile exploitation probability"
                    },
                    new ScoreFactor
                    {
                        Name = "Reachability",
                        Value = 0.7,
                        Weight = 0.25,
                        Contribution = 1.75,
                        Source = "Static Analysis",
                        Details = "Reachable via 2 call paths; confidence 0.7"
                    },
                    new ScoreFactor
                    {
                        Name = "VEX Status",
                        Value = 0,
                        Weight = 0.1,
                        Contribution = 0,
                        Source = "OpenVEX",
                        Details = "No VEX statement available"
                    },
                    new ScoreFactor
                    {
                        Name = "KEV Status",
                        Value = 0,
                        Weight = 0.05,
                        Contribution = 0,
                        Source = "CISA KEV",
                        Details = "Not in Known Exploited Vulnerabilities catalog"
                    }
                ]
            },
            ComputedAt = FixedTimestamp,
            ProfileUsed = "stella-default-v1"
        };
    }

    private static void EnsureScoreExplanationDeterminism(ScoreExplanation explanation)
    {
        // Sort factors alphabetically by name for deterministic output
        explanation.ScoreBreakdown.Factors = [.. explanation.ScoreBreakdown.Factors.OrderBy(f => f.Name, StringComparer.Ordinal)];

        // Compute determinism hash from stable representation
        var hashInput = $"{explanation.Digest}|{explanation.FinalScore:F6}|{explanation.ProfileUsed}|{string.Join(",", explanation.ScoreBreakdown.Factors.Select(f => $"{f.Name}:{f.Value:F6}:{f.Weight:F6}"))}";
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(hashInput));
        explanation.DeterminismHash = $"sha256:{Convert.ToHexStringLower(hashBytes)[..16]}";
    }

    #endregion

    #region Test Models

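    // Minimal local models used only by these golden tests, so the snapshots above have a concrete shape to serialize.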
    private sealed class HlcStatus
    {
        [JsonPropertyName("nodeId")]
        public string NodeId { get; set; } = string.Empty;

        [JsonPropertyName("healthy")]
        public bool Healthy { get; set; }

        [JsonPropertyName("currentTimestamp")]
        public HlcTimestamp CurrentTimestamp { get; set; } = new();

        [JsonPropertyName("formattedTimestamp")]
        public string FormattedTimestamp { get; set; } = string.Empty;

        [JsonPropertyName("clockDriftMs")]
        public double ClockDriftMs { get; set; }

        [JsonPropertyName("ntpServer")]
        public string NtpServer { get; set; } = string.Empty;

        [JsonPropertyName("lastNtpSync")]
        public DateTimeOffset LastNtpSync { get; set; }

        [JsonPropertyName("clusterState")]
        public HlcClusterState ClusterState { get; set; } = new();

        [JsonPropertyName("checkedAt")]
        public DateTimeOffset CheckedAt { get; set; }
    }

    private sealed class HlcTimestamp
    {
        [JsonPropertyName("physical")]
        public long Physical { get; set; }

        [JsonPropertyName("logical")]
        public int Logical { get; set; }

        [JsonPropertyName("nodeId")]
        public string NodeId { get; set; } = string.Empty;
    }

    private sealed class HlcClusterState
    {
        [JsonPropertyName("totalNodes")]
        public int TotalNodes { get; set; }

        [JsonPropertyName("syncedNodes")]
        public int SyncedNodes { get; set; }

        [JsonPropertyName("peers")]
        public List<HlcPeerStatus> Peers { get; set; } = [];
    }

    private sealed class HlcPeerStatus
    {
        [JsonPropertyName("nodeId")]
        public string NodeId { get; set; } = string.Empty;

        [JsonPropertyName("status")]
        public string Status { get; set; } = string.Empty;

        [JsonPropertyName("lastSeen")]
        public DateTimeOffset LastSeen { get; set; }

        [JsonPropertyName("driftMs")]
        public double DriftMs { get; set; }
    }

    private sealed class TimelineQueryResult
    {
        [JsonPropertyName("events")]
        public List<TimelineEvent> Events { get; set; } = [];

        [JsonPropertyName("pagination")]
        public PaginationInfo Pagination { get; set; } = new();

        [JsonPropertyName("determinismHash")]
        public string DeterminismHash { get; set; } = string.Empty;
    }

    private sealed class PaginationInfo
    {
        [JsonPropertyName("offset")]
        public int Offset { get; set; }

        [JsonPropertyName("limit")]
        public int Limit { get; set; }

        [JsonPropertyName("total")]
        public int Total { get; set; }

        [JsonPropertyName("hasMore")]
        public bool HasMore { get; set; }
    }

    private sealed class TimelineEvent
    {
        [JsonPropertyName("hlcTimestamp")]
        public string HlcTimestamp { get; set; } = string.Empty;

        [JsonPropertyName("type")]
        public string Type { get; set; } = string.Empty;

        [JsonPropertyName("entityId")]
        public string EntityId { get; set; } = string.Empty;

        [JsonPropertyName("actor")]
        public string Actor { get; set; } = string.Empty;

        [JsonPropertyName("details")]
        public string Details { get; set; } = string.Empty;
    }

    private sealed class ScoreExplanation
    {
        [JsonPropertyName("digest")]
        public string Digest { get; set; } = string.Empty;

        [JsonPropertyName("finalScore")]
        public double FinalScore { get; set; }

        [JsonPropertyName("scoreBreakdown")]
        public ScoreBreakdown ScoreBreakdown { get; set; } = new();

        [JsonPropertyName("computedAt")]
        public DateTimeOffset ComputedAt { get; set; }

        [JsonPropertyName("profileUsed")]
        public string ProfileUsed { get; set; } = string.Empty;

        [JsonPropertyName("determinismHash")]
        public string? DeterminismHash { get; set; }
    }

    private sealed class ScoreBreakdown
    {
        [JsonPropertyName("baseScore")]
        public double BaseScore { get; set; }

        [JsonPropertyName("cvssScore")]
        public double CvssScore { get; set; }

        [JsonPropertyName("epssAdjustment")]
        public double EpssAdjustment { get; set; }

        [JsonPropertyName("reachabilityAdjustment")]
        public double ReachabilityAdjustment { get; set; }

        [JsonPropertyName("vexAdjustment")]
        public double VexAdjustment { get; set; }

        [JsonPropertyName("factors")]
        public List<ScoreFactor> Factors { get; set; } = [];
    }

    private sealed class ScoreFactor
    {
        [JsonPropertyName("name")]
        public string Name { get; set; } = string.Empty;

        [JsonPropertyName("value")]
        public double Value { get; set; }

        [JsonPropertyName("weight")]
        public double Weight { get; set; }

        [JsonPropertyName("contribution")]
        public double Contribution { get; set; }

        [JsonPropertyName("source")]
        public string Source { get; set; } = string.Empty;

        [JsonPropertyName("details")]
        public string? Details { get; set; }
    }

    #endregion
}

/// <summary>
/// Extension methods for string normalization in golden tests.
/// </summary>
internal static class GoldenTestStringExtensions
{
    /// <summary>
    /// Normalize line endings to LF for cross-platform consistency.
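    /// Only CRLF pairs are rewritten; a lone CR is left unchanged, e.g. "a\r\nb".NormalizeLf() == "a\nb".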
    /// </summary>
    public static string NormalizeLf(this string input)
    {
        return input.Replace("\r\n", "\n");
    }
}