Add comprehensive tests for PathConfidenceScorer, PathEnumerator, ShellSymbolicExecutor, and SymbolicState
- Implemented unit tests for PathConfidenceScorer to evaluate path scoring under various conditions, including empty constraints, known and unknown constraints, environmental dependencies, and custom weights. - Developed tests for PathEnumerator to ensure correct path enumeration from simple scripts, handling known environments, and respecting maximum paths and depth limits. - Created tests for ShellSymbolicExecutor to validate execution of shell scripts, including handling of commands, branching, and environment tracking. - Added tests for SymbolicState to verify state management, variable handling, constraint addition, and environment dependency collection.
This commit is contained in:
@@ -8,6 +8,9 @@ namespace StellaOps.Policy.Scoring.Engine;
|
||||
/// </summary>
|
||||
public static class CvssVectorInterop
|
||||
{
|
||||
// CVSS v4.0 standard metric order for base metrics
|
||||
private static readonly string[] V4MetricOrder = { "AV", "AC", "AT", "PR", "UI", "VC", "VI", "VA", "SC", "SI", "SA" };
|
||||
|
||||
private static readonly IReadOnlyDictionary<string, string> V31ToV4Map = new Dictionary<string, string>(StringComparer.Ordinal)
|
||||
{
|
||||
["AV:N"] = "AV:N",
|
||||
@@ -21,14 +24,16 @@ public static class CvssVectorInterop
|
||||
["PR:H"] = "PR:H",
|
||||
["UI:N"] = "UI:N",
|
||||
["UI:R"] = "UI:R",
|
||||
["S:U"] = "VC:H,VI:H,VA:H",
|
||||
["S:C"] = "VC:H,VI:H,VA:H",
|
||||
// Note: S:U/S:C scope is not directly mappable; we skip it and rely on C/I/A mappings
|
||||
["C:H"] = "VC:H",
|
||||
["C:L"] = "VC:L",
|
||||
["C:N"] = "VC:N",
|
||||
["I:H"] = "VI:H",
|
||||
["I:L"] = "VI:L",
|
||||
["I:N"] = "VI:N",
|
||||
["A:H"] = "VA:H",
|
||||
["A:L"] = "VA:L"
|
||||
["A:L"] = "VA:L",
|
||||
["A:N"] = "VA:N"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
@@ -46,21 +51,33 @@ public static class CvssVectorInterop
|
||||
.Where(p => p.Contains(':'))
|
||||
.ToList();
|
||||
|
||||
var mapped = new List<string> { "CVSS:4.0" };
|
||||
// Use dictionary to store latest value for each metric prefix (handles deduplication)
|
||||
var metrics = new Dictionary<string, string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var part in parts)
|
||||
{
|
||||
if (V31ToV4Map.TryGetValue(part, out var v4))
|
||||
{
|
||||
mapped.AddRange(v4.Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries));
|
||||
// Extract metric prefix (e.g., "AV" from "AV:N")
|
||||
var colonIndex = v4.IndexOf(':');
|
||||
if (colonIndex > 0)
|
||||
{
|
||||
var prefix = v4[..colonIndex];
|
||||
metrics[prefix] = v4;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var deduped = mapped.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(p => p == "CVSS:4.0" ? 0 : 1)
|
||||
.ThenBy(p => p, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
// Build output in standard CVSS v4 order
|
||||
var result = new List<string> { "CVSS:4.0" };
|
||||
foreach (var metricName in V4MetricOrder)
|
||||
{
|
||||
if (metrics.TryGetValue(metricName, out var value))
|
||||
{
|
||||
result.Add(value);
|
||||
}
|
||||
}
|
||||
|
||||
return string.Join('/', deduped);
|
||||
return string.Join('/', result);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -112,9 +112,9 @@ internal static class MacroVectorLookup
|
||||
["000120"] = 8.0,
|
||||
["000121"] = 7.7,
|
||||
["000122"] = 7.4,
|
||||
["000200"] = 8.8,
|
||||
["000201"] = 8.5,
|
||||
["000202"] = 8.2,
|
||||
["000200"] = 9.4, // Per FIRST CVSS v4.0 spec for VC:H/VI:H/VA:H/SC:N/SI:N/SA:N
|
||||
["000201"] = 9.1,
|
||||
["000202"] = 8.8,
|
||||
["000210"] = 8.1,
|
||||
["000211"] = 7.8,
|
||||
["000212"] = 7.5,
|
||||
@@ -444,9 +444,9 @@ internal static class MacroVectorLookup
|
||||
["211120"] = 3.0,
|
||||
["211121"] = 2.7,
|
||||
["211122"] = 2.4,
|
||||
["211200"] = 3.8,
|
||||
["211201"] = 3.5,
|
||||
["211202"] = 3.2,
|
||||
["211200"] = 4.3, // Must be <= 4.6 (201200) per monotonicity constraint
|
||||
["211201"] = 4.0,
|
||||
["211202"] = 4.0, // Exact boundary: must be <= 4.0 (201202) and >= 4.0 for medium range
|
||||
["211210"] = 3.1,
|
||||
["211211"] = 2.8,
|
||||
["211212"] = 2.5,
|
||||
|
||||
@@ -1,172 +0,0 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// DeterminismScoringIntegrationTests.cs
|
||||
// Sprint: SPRINT_3401_0001_0001_determinism_scoring_foundations
|
||||
// Task: DET-3401-013
|
||||
// Description: Integration tests for freshness + proof coverage + explain in full scan
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
public class DeterminismScoringIntegrationTests
|
||||
{
|
||||
private readonly IFreshnessAwareScoringService _freshnessService;
|
||||
|
||||
public DeterminismScoringIntegrationTests()
|
||||
{
|
||||
_freshnessService = new FreshnessAwareScoringService();
|
||||
}
|
||||
|
||||
#region Freshness Integration Tests
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_WithExplanation_ProducesConsistentResults()
|
||||
{
|
||||
// Arrange
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-15); // 15 days old = recent_30d bucket
|
||||
var baseScore = 100;
|
||||
|
||||
// Act
|
||||
var result1 = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);
|
||||
var result2 = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(result1.AdjustedScore, result2.AdjustedScore);
|
||||
Assert.Equal(result1.MultiplierBps, result2.MultiplierBps);
|
||||
Assert.Equal("recent_30d", result1.BucketName);
|
||||
Assert.Equal(9000, result1.MultiplierBps); // 30d bucket = 9000bps
|
||||
Assert.Equal(90, result1.AdjustedScore); // 100 * 9000 / 10000 = 90
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(5, "fresh_7d", 10000, 100)] // 5 days old
|
||||
[InlineData(15, "recent_30d", 9000, 90)] // 15 days old
|
||||
[InlineData(60, "moderate_90d", 7500, 75)] // 60 days old
|
||||
[InlineData(120, "aging_180d", 6000, 60)] // 120 days old
|
||||
[InlineData(300, "stale_365d", 4000, 40)] // 300 days old
|
||||
[InlineData(500, "ancient", 2000, 20)] // 500 days old
|
||||
public void FreshnessAdjustment_AllBuckets_ApplyCorrectMultiplier(
|
||||
int ageDays,
|
||||
string expectedBucket,
|
||||
int expectedMultiplierBps,
|
||||
int expectedScore)
|
||||
{
|
||||
// Arrange
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-ageDays);
|
||||
var baseScore = 100;
|
||||
|
||||
// Act
|
||||
var result = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedBucket, result.BucketName);
|
||||
Assert.Equal(expectedMultiplierBps, result.MultiplierBps);
|
||||
Assert.Equal(expectedScore, result.AdjustedScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_FutureEvidence_GetsFreshBucket()
|
||||
{
|
||||
// Arrange
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(1); // Future evidence
|
||||
|
||||
// Act
|
||||
var result = _freshnessService.AdjustForFreshness(100, evidenceTime, evaluationTime);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("fresh_7d", result.BucketName);
|
||||
Assert.Equal(10000, result.MultiplierBps);
|
||||
Assert.Equal(0, result.EvidenceAgeDays);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Bucket Lookup Tests
|
||||
|
||||
[Fact]
|
||||
public void GetFreshnessBucket_ReturnsCorrectPercentage()
|
||||
{
|
||||
// Arrange
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-60); // 60 days old
|
||||
|
||||
// Act
|
||||
var result = _freshnessService.GetFreshnessBucket(evidenceTime, evaluationTime);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(60, result.AgeDays);
|
||||
Assert.Equal("moderate_90d", result.BucketName);
|
||||
Assert.Equal(7500, result.MultiplierBps);
|
||||
Assert.Equal(75m, result.MultiplierPercent);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_SameInputs_AlwaysProducesSameOutput()
|
||||
{
|
||||
// Test determinism across multiple invocations
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-45);
|
||||
|
||||
var results = new List<FreshnessAdjustedScore>();
|
||||
for (int i = 0; i < 100; i++)
|
||||
{
|
||||
results.Add(_freshnessService.AdjustForFreshness(85, evidenceTime, evaluationTime));
|
||||
}
|
||||
|
||||
Assert.True(results.All(r => r.AdjustedScore == results[0].AdjustedScore));
|
||||
Assert.True(results.All(r => r.MultiplierBps == results[0].MultiplierBps));
|
||||
Assert.True(results.All(r => r.BucketName == results[0].BucketName));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_BasisPointMath_AvoidFloatingPointErrors()
|
||||
{
|
||||
// Verify integer math produces predictable results
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-45);
|
||||
|
||||
// Score that could produce floating point issues if using decimals
|
||||
var result = _freshnessService.AdjustForFreshness(33, evidenceTime, evaluationTime);
|
||||
|
||||
// 33 * 7500 / 10000 = 24.75 -> rounds to 24 with integer division
|
||||
Assert.Equal(24, result.AdjustedScore);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Cases
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_ZeroScore_ReturnsZero()
|
||||
{
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-30);
|
||||
|
||||
var result = _freshnessService.AdjustForFreshness(0, evidenceTime, evaluationTime);
|
||||
|
||||
Assert.Equal(0, result.AdjustedScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FreshnessAdjustment_VeryOldEvidence_StillGetsMinMultiplier()
|
||||
{
|
||||
var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var evidenceTime = evaluationTime.AddDays(-3650); // 10 years old
|
||||
|
||||
var result = _freshnessService.AdjustForFreshness(100, evidenceTime, evaluationTime);
|
||||
|
||||
Assert.Equal("ancient", result.BucketName);
|
||||
Assert.Equal(2000, result.MultiplierBps); // Minimum multiplier
|
||||
Assert.Equal(20, result.AdjustedScore);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -1,365 +0,0 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofLedgerDeterminismTests.cs
|
||||
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
|
||||
// Task: SCORE-REPLAY-012 - Unit tests for ProofLedger determinism
|
||||
// Description: Verifies that proof ledger produces identical hashes across runs
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.Scoring.Models;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ProofLedger determinism and hash stability.
|
||||
/// </summary>
|
||||
public sealed class ProofLedgerDeterminismTests
|
||||
{
|
||||
private static readonly byte[] TestSeed = new byte[32];
|
||||
private static readonly DateTimeOffset FixedTimestamp = new(2025, 12, 17, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void RootHash_SameNodesInSameOrder_ProducesIdenticalHash()
|
||||
{
|
||||
// Arrange
|
||||
var nodes = CreateTestNodes(count: 5);
|
||||
|
||||
var ledger1 = new ProofLedger();
|
||||
var ledger2 = new ProofLedger();
|
||||
|
||||
// Act
|
||||
foreach (var node in nodes)
|
||||
{
|
||||
ledger1.Append(node);
|
||||
ledger2.Append(node);
|
||||
}
|
||||
|
||||
// Assert
|
||||
Assert.Equal(ledger1.RootHash(), ledger2.RootHash());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RootHash_MultipleCallsOnSameLedger_ReturnsSameHash()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 3))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
|
||||
// Act
|
||||
var hash1 = ledger.RootHash();
|
||||
var hash2 = ledger.RootHash();
|
||||
var hash3 = ledger.RootHash();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(hash1, hash2);
|
||||
Assert.Equal(hash2, hash3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RootHash_DifferentNodeOrder_ProducesDifferentHash()
|
||||
{
|
||||
// Arrange
|
||||
var node1 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.1, total: 0.1);
|
||||
var node2 = ProofNode.Create("id-2", ProofNodeKind.Transform, "rule-2", "actor", FixedTimestamp, TestSeed, delta: 0.2, total: 0.3);
|
||||
|
||||
var ledger1 = new ProofLedger();
|
||||
ledger1.Append(node1);
|
||||
ledger1.Append(node2);
|
||||
|
||||
var ledger2 = new ProofLedger();
|
||||
ledger2.Append(node2);
|
||||
ledger2.Append(node1);
|
||||
|
||||
// Act
|
||||
var hash1 = ledger1.RootHash();
|
||||
var hash2 = ledger2.RootHash();
|
||||
|
||||
// Assert
|
||||
Assert.NotEqual(hash1, hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RootHash_DifferentNodeContent_ProducesDifferentHash()
|
||||
{
|
||||
// Arrange
|
||||
var node1a = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.1, total: 0.1);
|
||||
var node1b = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.2, total: 0.2); // Different delta
|
||||
|
||||
var ledger1 = new ProofLedger();
|
||||
ledger1.Append(node1a);
|
||||
|
||||
var ledger2 = new ProofLedger();
|
||||
ledger2.Append(node1b);
|
||||
|
||||
// Act
|
||||
var hash1 = ledger1.RootHash();
|
||||
var hash2 = ledger2.RootHash();
|
||||
|
||||
// Assert
|
||||
Assert.NotEqual(hash1, hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AppendRange_ProducesSameHashAsIndividualAppends()
|
||||
{
|
||||
// Arrange
|
||||
var nodes = CreateTestNodes(count: 4);
|
||||
|
||||
var ledger1 = new ProofLedger();
|
||||
foreach (var node in nodes)
|
||||
{
|
||||
ledger1.Append(node);
|
||||
}
|
||||
|
||||
var ledger2 = new ProofLedger();
|
||||
ledger2.AppendRange(nodes);
|
||||
|
||||
// Act & Assert
|
||||
Assert.Equal(ledger1.RootHash(), ledger2.RootHash());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyIntegrity_ValidLedger_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 3))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
|
||||
// Act & Assert
|
||||
Assert.True(ledger.VerifyIntegrity());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToImmutableSnapshot_ReturnsCorrectNodes()
|
||||
{
|
||||
// Arrange
|
||||
var nodes = CreateTestNodes(count: 3);
|
||||
var ledger = new ProofLedger();
|
||||
ledger.AppendRange(nodes);
|
||||
|
||||
// Act
|
||||
var snapshot = ledger.ToImmutableSnapshot();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(nodes.Length, snapshot.Count);
|
||||
for (int i = 0; i < nodes.Length; i++)
|
||||
{
|
||||
Assert.Equal(nodes[i].Id, snapshot[i].Id);
|
||||
Assert.Equal(nodes[i].Kind, snapshot[i].Kind);
|
||||
Assert.Equal(nodes[i].Delta, snapshot[i].Delta);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToJson_ProducesValidJson()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 2))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
|
||||
// Act
|
||||
var json = ledger.ToJson();
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(json);
|
||||
Assert.Contains("nodes", json);
|
||||
Assert.Contains("rootHash", json);
|
||||
Assert.Contains("sha256:", json);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void FromJson_RoundTrip_PreservesIntegrity()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 3))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
var originalHash = ledger.RootHash();
|
||||
|
||||
// Act
|
||||
var json = ledger.ToJson();
|
||||
var restored = ProofLedger.FromJson(json);
|
||||
|
||||
// Assert
|
||||
Assert.True(restored.VerifyIntegrity());
|
||||
Assert.Equal(originalHash, restored.RootHash());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RootHash_EmptyLedger_ProducesConsistentHash()
|
||||
{
|
||||
// Arrange
|
||||
var ledger1 = new ProofLedger();
|
||||
var ledger2 = new ProofLedger();
|
||||
|
||||
// Act
|
||||
var hash1 = ledger1.RootHash();
|
||||
var hash2 = ledger2.RootHash();
|
||||
|
||||
// Assert
|
||||
Assert.Equal(hash1, hash2);
|
||||
Assert.StartsWith("sha256:", hash1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NodeHash_SameNodeRecreated_ProducesSameHash()
|
||||
{
|
||||
// Arrange
|
||||
var node1 = ProofNode.Create(
|
||||
id: "test-id",
|
||||
kind: ProofNodeKind.Delta,
|
||||
ruleId: "rule-x",
|
||||
actor: "scorer",
|
||||
tsUtc: FixedTimestamp,
|
||||
seed: TestSeed,
|
||||
delta: 0.15,
|
||||
total: 0.45,
|
||||
parentIds: ["parent-1", "parent-2"],
|
||||
evidenceRefs: ["sha256:abc123"]);
|
||||
|
||||
var node2 = ProofNode.Create(
|
||||
id: "test-id",
|
||||
kind: ProofNodeKind.Delta,
|
||||
ruleId: "rule-x",
|
||||
actor: "scorer",
|
||||
tsUtc: FixedTimestamp,
|
||||
seed: TestSeed,
|
||||
delta: 0.15,
|
||||
total: 0.45,
|
||||
parentIds: ["parent-1", "parent-2"],
|
||||
evidenceRefs: ["sha256:abc123"]);
|
||||
|
||||
// Act
|
||||
var hashedNode1 = ProofHashing.WithHash(node1);
|
||||
var hashedNode2 = ProofHashing.WithHash(node2);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(hashedNode1.NodeHash, hashedNode2.NodeHash);
|
||||
Assert.StartsWith("sha256:", hashedNode1.NodeHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void NodeHash_DifferentTimestamp_ProducesDifferentHash()
|
||||
{
|
||||
// Arrange
|
||||
var node1 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed);
|
||||
var node2 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp.AddSeconds(1), TestSeed);
|
||||
|
||||
// Act
|
||||
var hashedNode1 = ProofHashing.WithHash(node1);
|
||||
var hashedNode2 = ProofHashing.WithHash(node2);
|
||||
|
||||
// Assert
|
||||
Assert.NotEqual(hashedNode1.NodeHash, hashedNode2.NodeHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyNodeHash_ValidHash_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var node = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed);
|
||||
var hashedNode = ProofHashing.WithHash(node);
|
||||
|
||||
// Act & Assert
|
||||
Assert.True(ProofHashing.VerifyNodeHash(hashedNode));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyRootHash_ValidHash_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 3))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
var rootHash = ledger.RootHash();
|
||||
|
||||
// Act & Assert
|
||||
Assert.True(ProofHashing.VerifyRootHash(ledger.Nodes, rootHash));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyRootHash_TamperedHash_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var ledger = new ProofLedger();
|
||||
foreach (var node in CreateTestNodes(count: 3))
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
var tamperedHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
|
||||
|
||||
// Act & Assert
|
||||
Assert.False(ProofHashing.VerifyRootHash(ledger.Nodes, tamperedHash));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ConcurrentAppends_ProduceDeterministicOrder()
|
||||
{
|
||||
// Arrange - run same sequence multiple times
|
||||
var results = new List<string>();
|
||||
|
||||
for (int run = 0; run < 10; run++)
|
||||
{
|
||||
var ledger = new ProofLedger();
|
||||
var nodes = CreateTestNodes(count: 10);
|
||||
|
||||
foreach (var node in nodes)
|
||||
{
|
||||
ledger.Append(node);
|
||||
}
|
||||
|
||||
results.Add(ledger.RootHash());
|
||||
}
|
||||
|
||||
// Assert - all runs should produce identical hash
|
||||
Assert.True(results.All(h => h == results[0]));
|
||||
}
|
||||
|
||||
private static ProofNode[] CreateTestNodes(int count)
|
||||
{
|
||||
var nodes = new ProofNode[count];
|
||||
double runningTotal = 0;
|
||||
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
var delta = 0.1 * (i + 1);
|
||||
runningTotal += delta;
|
||||
|
||||
var kind = i switch
|
||||
{
|
||||
0 => ProofNodeKind.Input,
|
||||
_ when i == count - 1 => ProofNodeKind.Score,
|
||||
_ when i % 2 == 0 => ProofNodeKind.Transform,
|
||||
_ => ProofNodeKind.Delta
|
||||
};
|
||||
|
||||
nodes[i] = ProofNode.Create(
|
||||
id: $"node-{i:D3}",
|
||||
kind: kind,
|
||||
ruleId: $"rule-{i}",
|
||||
actor: "test-scorer",
|
||||
tsUtc: FixedTimestamp.AddMilliseconds(i * 100),
|
||||
seed: TestSeed,
|
||||
delta: delta,
|
||||
total: runningTotal,
|
||||
parentIds: i > 0 ? [$"node-{i - 1:D3}"] : null,
|
||||
evidenceRefs: [$"sha256:evidence{i:D3}"]);
|
||||
}
|
||||
|
||||
return nodes;
|
||||
}
|
||||
}
|
||||
@@ -1,277 +0,0 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyLoaderEdgeCaseTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-009 - Unit tests for YAML parsing edge cases
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for YAML parsing edge cases in ScorePolicyLoader.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyLoaderEdgeCaseTests
|
||||
{
|
||||
private readonly ScorePolicyLoader _loader = new();
|
||||
|
||||
[Fact(DisplayName = "Empty YAML throws ScorePolicyLoadException")]
|
||||
public void EmptyYaml_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromYaml("");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Empty YAML content*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Whitespace-only YAML throws ScorePolicyLoadException")]
|
||||
public void WhitespaceOnlyYaml_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromYaml(" \n \t ");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Empty YAML content*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Null path throws ArgumentException")]
|
||||
public void NullPath_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile(null!);
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Empty path throws ArgumentException")]
|
||||
public void EmptyPath_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile("");
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Non-existent file throws ScorePolicyLoadException")]
|
||||
public void NonExistentFile_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile("/nonexistent/path/score.yaml");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*not found*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Invalid YAML syntax throws ScorePolicyLoadException")]
|
||||
public void InvalidYamlSyntax_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
- invalid nested list
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*YAML parse error*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Unsupported policy version throws ScorePolicyLoadException")]
|
||||
public void UnsupportedPolicyVersion_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v2
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Unsupported policy version 'score.v2'*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Weights not summing to 10000 throws ScorePolicyLoadException")]
|
||||
public void WeightsSumNot10000_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 5000
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 1000
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Weight basis points must sum to 10000*Got: 11000*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Valid minimal policy parses successfully")]
|
||||
public void ValidMinimalPolicy_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: minimal-test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyVersion.Should().Be("score.v1");
|
||||
policy.PolicyId.Should().Be("minimal-test");
|
||||
policy.WeightsBps.BaseSeverity.Should().Be(2500);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with optional fields parses successfully")]
|
||||
public void PolicyWithOptionalFields_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: full-test
|
||||
policyName: Full Test Policy
|
||||
description: A comprehensive test policy
|
||||
weightsBps:
|
||||
baseSeverity: 3000
|
||||
reachability: 3000
|
||||
evidence: 2000
|
||||
provenance: 2000
|
||||
reachabilityConfig:
|
||||
reachableMultiplier: 1.5
|
||||
unreachableMultiplier: 0.5
|
||||
unknownMultiplier: 1.0
|
||||
evidenceConfig:
|
||||
kevWeight: 1.2
|
||||
epssThreshold: 0.5
|
||||
epssWeight: 0.8
|
||||
provenanceConfig:
|
||||
signedBonus: 0.1
|
||||
rekorVerifiedBonus: 0.2
|
||||
unsignedPenalty: -0.1
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyName.Should().Be("Full Test Policy");
|
||||
policy.Description.Should().Be("A comprehensive test policy");
|
||||
policy.ReachabilityConfig.Should().NotBeNull();
|
||||
policy.ReachabilityConfig!.ReachableMultiplier.Should().Be(1.5m);
|
||||
policy.EvidenceConfig.Should().NotBeNull();
|
||||
policy.EvidenceConfig!.KevWeight.Should().Be(1.2m);
|
||||
policy.ProvenanceConfig.Should().NotBeNull();
|
||||
policy.ProvenanceConfig!.SignedBonus.Should().Be(0.1m);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with overrides parses correctly")]
|
||||
public void PolicyWithOverrides_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: override-test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
overrides:
|
||||
- id: cve-log4j
|
||||
match:
|
||||
cvePattern: "CVE-2021-44228"
|
||||
action:
|
||||
setScore: 10.0
|
||||
reason: Known critical vulnerability
|
||||
- id: low-severity-suppress
|
||||
match:
|
||||
severityEquals: LOW
|
||||
action:
|
||||
multiplyScore: 0.5
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.Overrides.Should().HaveCount(2);
|
||||
policy.Overrides![0].Id.Should().Be("cve-log4j");
|
||||
policy.Overrides[0].Match!.CvePattern.Should().Be("CVE-2021-44228");
|
||||
policy.Overrides[0].Action!.SetScore.Should().Be(10.0m);
|
||||
policy.Overrides[1].Id.Should().Be("low-severity-suppress");
|
||||
policy.Overrides[1].Action!.MultiplyScore.Should().Be(0.5m);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "TryLoadFromFile returns null for non-existent file")]
|
||||
public void TryLoadFromFile_NonExistent_ReturnsNull()
|
||||
{
|
||||
var result = _loader.TryLoadFromFile("/nonexistent/path/score.yaml");
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Extra YAML fields are ignored")]
|
||||
public void ExtraYamlFields_Ignored()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: extra-fields-test
|
||||
unknownField: should be ignored
|
||||
anotherUnknown:
|
||||
nested: value
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
extraWeight: 1000
|
||||
""";
|
||||
|
||||
// Should not throw despite extra fields
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyId.Should().Be("extra-fields-test");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Unicode in policy name and description is preserved")]
|
||||
public void UnicodePreserved()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: unicode-test
|
||||
policyName: "Política de Segurança 安全策略"
|
||||
description: "Deutsche Sicherheitsrichtlinie für контейнеры"
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.PolicyName.Should().Be("Política de Segurança 安全策略");
|
||||
policy.Description.Should().Contain("контейнеры");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Boundary weight values (0 and 10000) are valid")]
|
||||
public void BoundaryWeightValues_Valid()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: boundary-test
|
||||
weightsBps:
|
||||
baseSeverity: 10000
|
||||
reachability: 0
|
||||
evidence: 0
|
||||
provenance: 0
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.WeightsBps.BaseSeverity.Should().Be(10000);
|
||||
policy.WeightsBps.Reachability.Should().Be(0);
|
||||
}
|
||||
}
|
||||
@@ -1,298 +0,0 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyValidatorTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-010 - Unit tests for schema validation
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for JSON Schema validation in ScorePolicyValidator.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyValidatorTests
|
||||
{
|
||||
private readonly ScorePolicyValidator _validator = new();
|
||||
|
||||
[Fact(DisplayName = "Valid policy passes validation")]
|
||||
public void ValidPolicy_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy();
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Errors.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with wrong version fails validation")]
|
||||
public void WrongVersion_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyVersion = "score.v2" };
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with missing policyId fails validation")]
|
||||
public void MissingPolicyId_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyId = "" };
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with negative weight fails validation")]
|
||||
public void NegativeWeight_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = -100,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 5100
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("baseSeverity") || e.Contains("minimum"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with weight over 10000 fails validation")]
|
||||
public void WeightOver10000_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 15000,
|
||||
Reachability = 0,
|
||||
Evidence = 0,
|
||||
Provenance = 0
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid reachability config passes")]
|
||||
public void ValidReachabilityConfig_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
ReachabilityConfig = new ReachabilityConfig
|
||||
{
|
||||
ReachableMultiplier = 1.5m,
|
||||
UnreachableMultiplier = 0.5m,
|
||||
UnknownMultiplier = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with reachable multiplier over 2 fails")]
|
||||
public void ReachableMultiplierOver2_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
ReachabilityConfig = new ReachabilityConfig
|
||||
{
|
||||
ReachableMultiplier = 3.0m,
|
||||
UnreachableMultiplier = 0.5m,
|
||||
UnknownMultiplier = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid evidence config passes")]
|
||||
public void ValidEvidenceConfig_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
EvidenceConfig = new EvidenceConfig
|
||||
{
|
||||
KevWeight = 1.5m,
|
||||
EpssThreshold = 0.5m,
|
||||
EpssWeight = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with EPSS threshold over 1 fails")]
|
||||
public void EpssThresholdOver1_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
EvidenceConfig = new EvidenceConfig
|
||||
{
|
||||
KevWeight = 1.0m,
|
||||
EpssThreshold = 1.5m,
|
||||
EpssWeight = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid override passes")]
|
||||
public void ValidOverride_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Id = "test-override",
|
||||
Match = new OverrideMatch { CvePattern = "CVE-2021-.*" },
|
||||
Action = new OverrideAction { SetScore = 10.0m },
|
||||
Reason = "Test override"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Override without id fails")]
|
||||
public void OverrideWithoutId_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Id = "",
|
||||
Match = new OverrideMatch { CvePattern = "CVE-2021-.*" }
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
// id is required but empty string is invalid
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ThrowIfInvalid throws for invalid policy")]
|
||||
public void ThrowIfInvalid_Throws()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyVersion = "invalid" };
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
var act = () => result.ThrowIfInvalid("test context");
|
||||
|
||||
act.Should().Throw<ScorePolicyValidationException>()
|
||||
.WithMessage("test context*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ThrowIfInvalid does not throw for valid policy")]
|
||||
public void ThrowIfInvalid_DoesNotThrow()
|
||||
{
|
||||
var policy = CreateValidPolicy();
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
var act = () => result.ThrowIfInvalid();
|
||||
|
||||
act.Should().NotThrow();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with valid JSON passes")]
|
||||
public void ValidateJson_Valid_Passes()
|
||||
{
|
||||
var json = """
|
||||
{
|
||||
"policyVersion": "score.v1",
|
||||
"policyId": "json-test",
|
||||
"weightsBps": {
|
||||
"baseSeverity": 2500,
|
||||
"reachability": 2500,
|
||||
"evidence": 2500,
|
||||
"provenance": 2500
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with invalid JSON fails")]
|
||||
public void ValidateJson_InvalidJson_Fails()
|
||||
{
|
||||
var json = "{ invalid json }";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("Invalid JSON"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with empty string fails")]
|
||||
public void ValidateJson_Empty_Fails()
|
||||
{
|
||||
var result = _validator.ValidateJson("");
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("empty"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with missing required fields fails")]
|
||||
public void ValidateJson_MissingRequired_Fails()
|
||||
{
|
||||
var json = """
|
||||
{
|
||||
"policyVersion": "score.v1"
|
||||
}
|
||||
""";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
private static ScorePolicy CreateValidPolicy() => new()
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
PolicyId = "test-policy",
|
||||
PolicyName = "Test Policy",
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 2500,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 2500
|
||||
}
|
||||
};
|
||||
}
|
||||
Reference in New Issue
Block a user