5100* tests strengthtenen work
This commit is contained in:
@@ -0,0 +1,521 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AttestationDeterminismTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
|
||||
// Task: ATTESTOR-5100-014 - Add determinism test: same inputs → same attestation payload hash (excluding non-deterministic signatures)
|
||||
// Description: Determinism tests for attestation payload generation
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Attestor.Types.Tests.Determinism;
|
||||
|
||||
/// <summary>
|
||||
/// Determinism tests for attestation generation.
|
||||
/// Validates:
|
||||
/// - Same inputs produce same payload hash (excluding signatures)
|
||||
/// - Canonical JSON serialization is stable
|
||||
/// - Field ordering is deterministic
|
||||
/// - Unicode normalization is consistent
|
||||
/// - Whitespace handling is deterministic
|
||||
/// </summary>
|
||||
[Trait("Category", "Determinism")]
|
||||
[Trait("Category", "Attestor")]
|
||||
[Trait("Category", "Integration")]
|
||||
public sealed class AttestationDeterminismTests
|
||||
{
|
||||
private readonly ITestOutputHelper _output;
|
||||
private readonly JsonSerializerOptions _canonicalOptions;
|
||||
|
||||
public AttestationDeterminismTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
_canonicalOptions = new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
|
||||
};
|
||||
}
|
||||
|
||||
#region Same Inputs Same Hash Tests
|
||||
|
||||
[Fact]
|
||||
public void SameInputs_ProduceSamePayloadHash()
|
||||
{
|
||||
// Arrange
|
||||
var subject = new SubjectDto
|
||||
{
|
||||
Name = "pkg:npm/test-package@1.0.0",
|
||||
Digest = new Dictionary<string, string>
|
||||
{
|
||||
["sha256"] = "4d5f6e7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e"
|
||||
}
|
||||
};
|
||||
|
||||
var predicate = new
|
||||
{
|
||||
builder = new { id = "https://stellaops.io/builder/v1" },
|
||||
buildType = "https://stellaops.io/buildType/scan/v1",
|
||||
invocation = new { configSource = new { uri = "https://example.com/config" } }
|
||||
};
|
||||
|
||||
// Act - create statement multiple times
|
||||
var hashes = new List<string>();
|
||||
for (int i = 0; i < 10; i++)
|
||||
{
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { subject },
|
||||
predicate: predicate);
|
||||
|
||||
var json = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
var hash = ComputeSha256(json);
|
||||
hashes.Add(hash);
|
||||
}
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, "same inputs should produce same hash every time");
|
||||
_output.WriteLine($"✓ Deterministic hash: {hashes[0]}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MultipleSubjects_OrderPreserved_SameHash()
|
||||
{
|
||||
// Arrange
|
||||
var subjects = new[]
|
||||
{
|
||||
new SubjectDto { Name = "pkg:npm/a@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) } },
|
||||
new SubjectDto { Name = "pkg:npm/b@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('b', 64) } },
|
||||
new SubjectDto { Name = "pkg:npm/c@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('c', 64) } }
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash1 = CreateStatementHash(subjects);
|
||||
var hash2 = CreateStatementHash(subjects);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "same subject order should produce same hash");
|
||||
_output.WriteLine($"✓ Multi-subject hash: {hash1}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SubjectOrderMatters_DifferentOrder_DifferentHash()
|
||||
{
|
||||
// Arrange
|
||||
var subjects1 = new[]
|
||||
{
|
||||
new SubjectDto { Name = "pkg:npm/a@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) } },
|
||||
new SubjectDto { Name = "pkg:npm/b@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('b', 64) } }
|
||||
};
|
||||
|
||||
var subjects2 = new[]
|
||||
{
|
||||
new SubjectDto { Name = "pkg:npm/b@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('b', 64) } },
|
||||
new SubjectDto { Name = "pkg:npm/a@1.0.0", Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) } }
|
||||
};
|
||||
|
||||
// Act
|
||||
var hash1 = CreateStatementHash(subjects1);
|
||||
var hash2 = CreateStatementHash(subjects2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().NotBe(hash2, "different subject order should produce different hash");
|
||||
_output.WriteLine($"Order 1 hash: {hash1}");
|
||||
_output.WriteLine($"Order 2 hash: {hash2}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Canonical JSON Tests
|
||||
|
||||
[Fact]
|
||||
public void CanonicalJson_NoWhitespace()
|
||||
{
|
||||
// Arrange
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: new { test = true });
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
|
||||
// Assert
|
||||
json.Should().NotContain("\n", "canonical JSON should have no newlines");
|
||||
json.Should().NotContain("\r", "canonical JSON should have no carriage returns");
|
||||
json.Should().NotContain(" ", "canonical JSON should have no double spaces");
|
||||
_output.WriteLine($"Canonical JSON length: {json.Length}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanonicalJson_FieldOrderDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var statement1 = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: new { a = 1, b = 2, c = 3 });
|
||||
|
||||
var statement2 = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: new { a = 1, b = 2, c = 3 });
|
||||
|
||||
// Act
|
||||
var json1 = JsonSerializer.Serialize(statement1, _canonicalOptions);
|
||||
var json2 = JsonSerializer.Serialize(statement2, _canonicalOptions);
|
||||
|
||||
// Assert
|
||||
json1.Should().Be(json2, "field order should be deterministic");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanonicalJson_NullsOmitted()
|
||||
{
|
||||
// Arrange
|
||||
var statement = new InTotoStatement
|
||||
{
|
||||
Type = "https://in-toto.io/Statement/v1",
|
||||
Subject = new[] { CreateTestSubject() },
|
||||
PredicateType = "https://slsa.dev/provenance/v1",
|
||||
Predicate = new { value = (string?)null, present = "yes" }
|
||||
};
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
|
||||
// Assert
|
||||
json.Should().NotContain("null", "null values should be omitted");
|
||||
json.Should().Contain("present", "non-null values should be present");
|
||||
_output.WriteLine($"JSON with nulls omitted: {json}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Unicode Normalization Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("café", "café")] // NFC vs NFD
|
||||
[InlineData("naïve", "naïve")]
|
||||
[InlineData("über", "über")]
|
||||
public void UnicodeNormalization_ConsistentHandling(string input1, string input2)
|
||||
{
|
||||
// Arrange
|
||||
var subject1 = new SubjectDto
|
||||
{
|
||||
Name = $"pkg:npm/{input1}@1.0.0",
|
||||
Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) }
|
||||
};
|
||||
|
||||
var subject2 = new SubjectDto
|
||||
{
|
||||
Name = $"pkg:npm/{input2}@1.0.0",
|
||||
Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) }
|
||||
};
|
||||
|
||||
// Act
|
||||
var json1 = JsonSerializer.Serialize(subject1, _canonicalOptions);
|
||||
var json2 = JsonSerializer.Serialize(subject2, _canonicalOptions);
|
||||
|
||||
// Assert - same input should produce same output
|
||||
if (input1 == input2)
|
||||
{
|
||||
var hash1 = ComputeSha256(json1);
|
||||
var hash2 = ComputeSha256(json2);
|
||||
hash1.Should().Be(hash2);
|
||||
_output.WriteLine($"✓ Unicode '{input1}' consistent: {hash1}");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void UnicodeEscaping_Deterministic()
|
||||
{
|
||||
// Arrange
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { new SubjectDto
|
||||
{
|
||||
Name = "pkg:npm/test-🎉@1.0.0",
|
||||
Digest = new Dictionary<string, string> { ["sha256"] = new string('a', 64) }
|
||||
}},
|
||||
predicate: new { emoji = "🚀" });
|
||||
|
||||
// Act
|
||||
var json1 = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
var json2 = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
|
||||
// Assert
|
||||
json1.Should().Be(json2);
|
||||
_output.WriteLine($"Unicode JSON: {json1}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Timestamp Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void TimestampFormat_Iso8601_Deterministic()
|
||||
{
|
||||
// Arrange
|
||||
var timestamp = new DateTime(2025, 1, 1, 12, 0, 0, DateTimeKind.Utc);
|
||||
var formatted1 = timestamp.ToString("O");
|
||||
var formatted2 = timestamp.ToString("O");
|
||||
|
||||
// Assert
|
||||
formatted1.Should().Be(formatted2);
|
||||
formatted1.Should().MatchRegex(@"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}");
|
||||
_output.WriteLine($"ISO8601 timestamp: {formatted1}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void StatementWithTimestamp_SameTimestamp_SameHash()
|
||||
{
|
||||
// Arrange
|
||||
var fixedTimestamp = "2025-01-01T00:00:00Z";
|
||||
|
||||
var predicate1 = new { buildStartedOn = fixedTimestamp };
|
||||
var predicate2 = new { buildStartedOn = fixedTimestamp };
|
||||
|
||||
var statement1 = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: predicate1);
|
||||
|
||||
var statement2 = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: predicate2);
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeSha256(JsonSerializer.Serialize(statement1, _canonicalOptions));
|
||||
var hash2 = ComputeSha256(JsonSerializer.Serialize(statement2, _canonicalOptions));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
_output.WriteLine($"✓ Fixed timestamp hash: {hash1}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Digest Algorithm Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void MultipleDigestAlgorithms_OrderDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var subject = new SubjectDto
|
||||
{
|
||||
Name = "pkg:npm/multi-digest@1.0.0",
|
||||
Digest = new Dictionary<string, string>
|
||||
{
|
||||
["sha256"] = new string('a', 64),
|
||||
["sha512"] = new string('b', 128)
|
||||
}
|
||||
};
|
||||
|
||||
// Act - serialize multiple times
|
||||
var jsons = new List<string>();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
jsons.Add(JsonSerializer.Serialize(subject, _canonicalOptions));
|
||||
}
|
||||
|
||||
// Assert - all serializations should be identical
|
||||
jsons.Distinct().Should().HaveCount(1);
|
||||
_output.WriteLine($"Multi-digest JSON: {jsons[0]}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Large Payload Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void LargePayload_DeterministicHash()
|
||||
{
|
||||
// Arrange
|
||||
var largeComponents = Enumerable.Range(0, 1000)
|
||||
.Select(i => new
|
||||
{
|
||||
name = $"component-{i:D4}",
|
||||
version = $"{i / 100}.{i % 100}.0",
|
||||
digest = $"sha256:{i:x64}"
|
||||
})
|
||||
.ToArray();
|
||||
|
||||
var predicate = new { components = largeComponents };
|
||||
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://cyclonedx.org/bom/v1.6",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: predicate);
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeSha256(JsonSerializer.Serialize(statement, _canonicalOptions));
|
||||
var hash2 = ComputeSha256(JsonSerializer.Serialize(statement, _canonicalOptions));
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
_output.WriteLine($"✓ Large payload ({largeComponents.Length} components) hash: {hash1}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Parallel Generation Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ParallelGeneration_SameHash()
|
||||
{
|
||||
// Arrange
|
||||
var predicate = new { test = "parallel" };
|
||||
var subjects = new[] { CreateTestSubject() };
|
||||
|
||||
// Act - generate in parallel
|
||||
var tasks = Enumerable.Range(0, 10)
|
||||
.Select(_ => Task.Run(() =>
|
||||
{
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: subjects,
|
||||
predicate: predicate);
|
||||
return ComputeSha256(JsonSerializer.Serialize(statement, _canonicalOptions));
|
||||
}))
|
||||
.ToArray();
|
||||
|
||||
var hashes = await Task.WhenAll(tasks);
|
||||
|
||||
// Assert
|
||||
hashes.Distinct().Should().HaveCount(1, "parallel generation should produce same hash");
|
||||
_output.WriteLine($"✓ Parallel generation ({tasks.Length} threads) hash: {hashes[0]}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Signature Exclusion Tests
|
||||
|
||||
[Fact]
|
||||
public void PayloadHash_ExcludesSignatures()
|
||||
{
|
||||
// Arrange
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: new[] { CreateTestSubject() },
|
||||
predicate: new { test = true });
|
||||
|
||||
var json = JsonSerializer.Serialize(statement, _canonicalOptions);
|
||||
var payloadHash = ComputeSha256(json);
|
||||
|
||||
// Create envelope with different signatures
|
||||
var envelope1 = new
|
||||
{
|
||||
payloadType = "application/vnd.in-toto+json",
|
||||
payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(json)),
|
||||
signatures = new[] { new { keyid = "key1", sig = "sig1" } }
|
||||
};
|
||||
|
||||
var envelope2 = new
|
||||
{
|
||||
payloadType = "application/vnd.in-toto+json",
|
||||
payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(json)),
|
||||
signatures = new[] { new { keyid = "key2", sig = "sig2" } }
|
||||
};
|
||||
|
||||
// Act - extract and hash payloads
|
||||
var extractedPayload1 = Convert.FromBase64String(envelope1.payload);
|
||||
var extractedPayload2 = Convert.FromBase64String(envelope2.payload);
|
||||
var hash1 = ComputeSha256(extractedPayload1);
|
||||
var hash2 = ComputeSha256(extractedPayload2);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2, "payload hash should be independent of signatures");
|
||||
hash1.Should().Be(payloadHash, "extracted payload should match original");
|
||||
_output.WriteLine($"✓ Payload hash (signature-independent): {payloadHash}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static InTotoStatement CreateInTotoStatement(
|
||||
string predicateType,
|
||||
IEnumerable<SubjectDto> subjects,
|
||||
object predicate)
|
||||
{
|
||||
return new InTotoStatement
|
||||
{
|
||||
Type = "https://in-toto.io/Statement/v1",
|
||||
Subject = subjects.ToArray(),
|
||||
PredicateType = predicateType,
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
private string CreateStatementHash(IEnumerable<SubjectDto> subjects)
|
||||
{
|
||||
var statement = CreateInTotoStatement(
|
||||
predicateType: "https://slsa.dev/provenance/v1",
|
||||
subjects: subjects,
|
||||
predicate: new { test = true });
|
||||
|
||||
return ComputeSha256(JsonSerializer.Serialize(statement, _canonicalOptions));
|
||||
}
|
||||
|
||||
private static SubjectDto CreateTestSubject()
|
||||
{
|
||||
return new SubjectDto
|
||||
{
|
||||
Name = "pkg:npm/test-package@1.0.0",
|
||||
Digest = new Dictionary<string, string>
|
||||
{
|
||||
["sha256"] = "4d5f6e7a8b9c0d1e2f3a4b5c6d7e8f9a0b1c2d3e4f5a6b7c8d9e0f1a2b3c4d5e"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeSha256(string data)
|
||||
{
|
||||
return ComputeSha256(Encoding.UTF8.GetBytes(data));
|
||||
}
|
||||
|
||||
private static string ComputeSha256(byte[] data)
|
||||
{
|
||||
var hash = SHA256.HashData(data);
|
||||
return "sha256:" + Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Types
|
||||
|
||||
private record SubjectDto
|
||||
{
|
||||
[JsonPropertyName("name")]
|
||||
public string Name { get; init; } = "";
|
||||
|
||||
[JsonPropertyName("digest")]
|
||||
public Dictionary<string, string> Digest { get; init; } = new();
|
||||
}
|
||||
|
||||
private record InTotoStatement
|
||||
{
|
||||
[JsonPropertyName("_type")]
|
||||
public string Type { get; init; } = "https://in-toto.io/Statement/v1";
|
||||
|
||||
[JsonPropertyName("subject")]
|
||||
public IReadOnlyList<SubjectDto> Subject { get; init; } = Array.Empty<SubjectDto>();
|
||||
|
||||
[JsonPropertyName("predicateType")]
|
||||
public string PredicateType { get; init; } = "";
|
||||
|
||||
[JsonPropertyName("predicate")]
|
||||
public object? Predicate { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,550 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SbomAttestationSignVerifyIntegrationTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
|
||||
// Task: ATTESTOR-5100-013 - Add integration test: generate SBOM → create attestation → sign → store → verify → replay → same digest
|
||||
// Description: End-to-end integration tests for SBOM attestation workflow
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Attestor.Types.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the complete SBOM attestation workflow:
|
||||
/// 1. Generate SBOM (mock)
|
||||
/// 2. Create attestation statement
|
||||
/// 3. Sign attestation (DSSE envelope)
|
||||
/// 4. Store attestation
|
||||
/// 5. Verify attestation
|
||||
/// 6. Replay attestation
|
||||
/// 7. Verify digest matches original
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Category", "Attestor")]
|
||||
[Trait("Category", "E2E")]
|
||||
public sealed class SbomAttestationSignVerifyIntegrationTests
|
||||
{
|
||||
private readonly ITestOutputHelper _output;
|
||||
|
||||
public SbomAttestationSignVerifyIntegrationTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
}
|
||||
|
||||
#region Full Workflow Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SbomToAttestationWorkflow_EndToEnd_ProducesVerifiableAttestation()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
var store = new MockAttestationStore();
|
||||
|
||||
// Step 1: Generate SBOM
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/test-package@1.0.0");
|
||||
var sbomDigest = ComputeSha256(sbom);
|
||||
_output.WriteLine($"Step 1: Generated SBOM with digest {sbomDigest}");
|
||||
|
||||
// Step 2: Create attestation statement
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/test-package@1.0.0", sbomDigest) },
|
||||
predicate: sbom);
|
||||
var statementJson = JsonSerializer.Serialize(statement);
|
||||
_output.WriteLine($"Step 2: Created statement of type {statement.PredicateType}");
|
||||
|
||||
// Step 3: Sign attestation (create DSSE envelope)
|
||||
var envelope = await signer.SignAsync(statementJson, "application/vnd.in-toto+json");
|
||||
envelope.Should().NotBeNull();
|
||||
envelope.Signatures.Should().NotBeEmpty();
|
||||
_output.WriteLine($"Step 3: Signed with {envelope.Signatures.Count} signature(s)");
|
||||
|
||||
// Step 4: Store attestation
|
||||
var storeResult = await store.StoreAsync(envelope);
|
||||
storeResult.Should().BeTrue();
|
||||
_output.WriteLine($"Step 4: Stored attestation with ID {envelope.Signatures[0].KeyId}");
|
||||
|
||||
// Step 5: Verify attestation
|
||||
var verifyResult = await signer.VerifyAsync(envelope);
|
||||
verifyResult.Should().BeTrue();
|
||||
_output.WriteLine($"Step 5: Verification succeeded");
|
||||
|
||||
// Step 6: Replay attestation (retrieve from store)
|
||||
var replayedEnvelope = await store.RetrieveAsync(envelope.Signatures[0].KeyId);
|
||||
replayedEnvelope.Should().NotBeNull();
|
||||
_output.WriteLine($"Step 6: Replayed attestation from store");
|
||||
|
||||
// Step 7: Verify digest matches original
|
||||
var originalPayload = Convert.FromBase64String(envelope.Payload);
|
||||
var replayedPayload = Convert.FromBase64String(replayedEnvelope!.Payload);
|
||||
var originalDigest = ComputeSha256(originalPayload);
|
||||
var replayedDigest = ComputeSha256(replayedPayload);
|
||||
|
||||
originalDigest.Should().Be(replayedDigest, "replayed attestation should have same digest");
|
||||
_output.WriteLine($"Step 7: Original digest matches replayed digest: {originalDigest}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomToAttestationWorkflow_CycloneDx_ProducesVerifiableAttestation()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
var store = new MockAttestationStore();
|
||||
|
||||
// Step 1: Generate CycloneDX SBOM
|
||||
var sbom = GenerateCycloneDxSbom("pkg:npm/cyclonedx-test@2.0.0");
|
||||
var sbomDigest = ComputeSha256(sbom);
|
||||
_output.WriteLine($"Step 1: Generated CycloneDX SBOM with digest {sbomDigest}");
|
||||
|
||||
// Step 2: Create attestation statement
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://cyclonedx.org/bom/v1.6",
|
||||
subjects: new[] { new Subject("pkg:npm/cyclonedx-test@2.0.0", sbomDigest) },
|
||||
predicate: sbom);
|
||||
|
||||
// Step 3: Sign attestation
|
||||
var statementJson = JsonSerializer.Serialize(statement);
|
||||
var envelope = await signer.SignAsync(statementJson, "application/vnd.in-toto+json");
|
||||
|
||||
// Step 4: Store
|
||||
await store.StoreAsync(envelope);
|
||||
|
||||
// Step 5: Verify
|
||||
var verified = await signer.VerifyAsync(envelope);
|
||||
verified.Should().BeTrue();
|
||||
|
||||
// Step 6 & 7: Replay and compare
|
||||
var replayed = await store.RetrieveAsync(envelope.Signatures[0].KeyId);
|
||||
replayed.Should().NotBeNull();
|
||||
|
||||
var originalDigest = ComputeSha256(Convert.FromBase64String(envelope.Payload));
|
||||
var replayedDigest = ComputeSha256(Convert.FromBase64String(replayed!.Payload));
|
||||
originalDigest.Should().Be(replayedDigest);
|
||||
|
||||
_output.WriteLine("✓ CycloneDX workflow completed successfully");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomToAttestationWorkflow_MultipleSubjects_AllVerified()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
|
||||
// Generate SBOM with multiple components
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/multi-component@1.0.0");
|
||||
var sbomDigest = ComputeSha256(sbom);
|
||||
|
||||
// Multiple subjects
|
||||
var subjects = new[]
|
||||
{
|
||||
new Subject("pkg:npm/multi-component@1.0.0", sbomDigest),
|
||||
new Subject("pkg:npm/dependency-a@1.0.0", "sha256:aaaa" + new string('0', 56)),
|
||||
new Subject("pkg:npm/dependency-b@2.0.0", "sha256:bbbb" + new string('0', 56))
|
||||
};
|
||||
|
||||
// Act
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: subjects,
|
||||
predicate: sbom);
|
||||
|
||||
var statementJson = JsonSerializer.Serialize(statement);
|
||||
var envelope = await signer.SignAsync(statementJson, "application/vnd.in-toto+json");
|
||||
|
||||
// Assert
|
||||
var verified = await signer.VerifyAsync(envelope);
|
||||
verified.Should().BeTrue();
|
||||
|
||||
// Deserialize and verify all subjects present
|
||||
var payload = Convert.FromBase64String(envelope.Payload);
|
||||
var deserializedStatement = JsonSerializer.Deserialize<InTotoStatement>(payload);
|
||||
deserializedStatement!.Subject.Should().HaveCount(3);
|
||||
|
||||
_output.WriteLine($"✓ Multi-subject workflow completed with {subjects.Length} subjects");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SameInputs_ProduceSameStatementDigest()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/deterministic-test@1.0.0");
|
||||
var sbomDigest = ComputeSha256(sbom);
|
||||
|
||||
// Act - create same statement twice
|
||||
var statement1 = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/deterministic-test@1.0.0", sbomDigest) },
|
||||
predicate: sbom);
|
||||
|
||||
var statement2 = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/deterministic-test@1.0.0", sbomDigest) },
|
||||
predicate: sbom);
|
||||
|
||||
// Assert - canonical JSON should be identical
|
||||
var json1 = JsonSerializer.Serialize(statement1, new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
});
|
||||
var json2 = JsonSerializer.Serialize(statement2, new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
|
||||
});
|
||||
|
||||
var digest1 = ComputeSha256(Encoding.UTF8.GetBytes(json1));
|
||||
var digest2 = ComputeSha256(Encoding.UTF8.GetBytes(json2));
|
||||
|
||||
digest1.Should().Be(digest2, "same inputs should produce same statement digest");
|
||||
_output.WriteLine($"✓ Deterministic digest: {digest1}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ReplayedAttestation_VerifiesIdentically()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
var store = new MockAttestationStore();
|
||||
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/replay-test@1.0.0");
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/replay-test@1.0.0", ComputeSha256(sbom)) },
|
||||
predicate: sbom);
|
||||
|
||||
var envelope = await signer.SignAsync(
|
||||
JsonSerializer.Serialize(statement),
|
||||
"application/vnd.in-toto+json");
|
||||
|
||||
await store.StoreAsync(envelope);
|
||||
|
||||
// Act - verify multiple times after replay
|
||||
var results = new List<bool>();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
var replayed = await store.RetrieveAsync(envelope.Signatures[0].KeyId);
|
||||
var verified = await signer.VerifyAsync(replayed!);
|
||||
results.Add(verified);
|
||||
}
|
||||
|
||||
// Assert
|
||||
results.Should().OnlyContain(r => r == true, "all replay verifications should succeed");
|
||||
_output.WriteLine($"✓ All {results.Count} replay verifications succeeded");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Tamper Detection Tests
|
||||
|
||||
[Fact]
|
||||
public async Task TamperedAttestation_FailsVerification()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/tamper-test@1.0.0");
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/tamper-test@1.0.0", ComputeSha256(sbom)) },
|
||||
predicate: sbom);
|
||||
|
||||
var envelope = await signer.SignAsync(
|
||||
JsonSerializer.Serialize(statement),
|
||||
"application/vnd.in-toto+json");
|
||||
|
||||
// Act - tamper with the payload
|
||||
var tamperedPayload = Convert.FromBase64String(envelope.Payload);
|
||||
tamperedPayload[0] ^= 0xFF; // Flip bits
|
||||
var tamperedEnvelope = new DsseEnvelope
|
||||
{
|
||||
PayloadType = envelope.PayloadType,
|
||||
Payload = Convert.ToBase64String(tamperedPayload),
|
||||
Signatures = envelope.Signatures
|
||||
};
|
||||
|
||||
// Assert
|
||||
var verified = await signer.VerifyAsync(tamperedEnvelope);
|
||||
verified.Should().BeFalse("tampered payload should fail verification");
|
||||
_output.WriteLine("✓ Tampered attestation correctly rejected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ModifiedSubjectDigest_FailsVerification()
|
||||
{
|
||||
// Arrange
|
||||
var attestor = new MockAttestor();
|
||||
var signer = new MockSigner();
|
||||
|
||||
var sbom = GenerateSpdxSbom("pkg:npm/subject-tamper@1.0.0");
|
||||
var realDigest = ComputeSha256(sbom);
|
||||
var fakeDigest = "sha256:" + new string('f', 64);
|
||||
|
||||
var statement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/subject-tamper@1.0.0", realDigest) },
|
||||
predicate: sbom);
|
||||
|
||||
var envelope = await signer.SignAsync(
|
||||
JsonSerializer.Serialize(statement),
|
||||
"application/vnd.in-toto+json");
|
||||
|
||||
// Act - verify original succeeds
|
||||
var originalVerified = await signer.VerifyAsync(envelope);
|
||||
originalVerified.Should().BeTrue();
|
||||
|
||||
// Modify the statement to have wrong digest and re-encode
|
||||
var tamperedStatement = attestor.CreateStatement(
|
||||
predicateType: "https://spdx.dev/Document/v3",
|
||||
subjects: new[] { new Subject("pkg:npm/subject-tamper@1.0.0", fakeDigest) },
|
||||
predicate: sbom);
|
||||
|
||||
var tamperedEnvelope = new DsseEnvelope
|
||||
{
|
||||
PayloadType = envelope.PayloadType,
|
||||
Payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(tamperedStatement))),
|
||||
Signatures = envelope.Signatures // Original signature
|
||||
};
|
||||
|
||||
// Assert - tampered envelope should fail
|
||||
var tamperedVerified = await signer.VerifyAsync(tamperedEnvelope);
|
||||
tamperedVerified.Should().BeFalse("modified subject digest should fail verification");
|
||||
_output.WriteLine("✓ Modified subject digest correctly rejected");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static string GenerateSpdxSbom(string purl)
|
||||
{
|
||||
var sbom = new
|
||||
{
|
||||
spdxVersion = "SPDX-3.0.1",
|
||||
creationInfo = new
|
||||
{
|
||||
created = "2025-01-01T00:00:00Z",
|
||||
createdBy = new[] { "StellaOps" }
|
||||
},
|
||||
name = $"SBOM for {purl}",
|
||||
packages = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
SPDXID = $"SPDXRef-{purl.Replace(":", "-").Replace("@", "-")}",
|
||||
name = purl.Split('/').Last().Split('@').First(),
|
||||
versionInfo = purl.Split('@').Last(),
|
||||
externalRefs = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
referenceCategory = "PACKAGE-MANAGER",
|
||||
referenceType = "purl",
|
||||
referenceLocator = purl
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return JsonSerializer.Serialize(sbom, new JsonSerializerOptions { WriteIndented = false });
|
||||
}
|
||||
|
||||
private static string GenerateCycloneDxSbom(string purl)
|
||||
{
|
||||
var sbom = new
|
||||
{
|
||||
bomFormat = "CycloneDX",
|
||||
specVersion = "1.6",
|
||||
version = 1,
|
||||
metadata = new
|
||||
{
|
||||
timestamp = "2025-01-01T00:00:00Z",
|
||||
tools = new[] { new { name = "StellaOps", version = "1.0.0" } }
|
||||
},
|
||||
components = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
type = "library",
|
||||
name = purl.Split('/').Last().Split('@').First(),
|
||||
version = purl.Split('@').Last(),
|
||||
purl
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return JsonSerializer.Serialize(sbom, new JsonSerializerOptions { WriteIndented = false });
|
||||
}
|
||||
|
||||
private static string ComputeSha256(byte[] data)
|
||||
{
|
||||
var hash = SHA256.HashData(data);
|
||||
return "sha256:" + Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
private static string ComputeSha256(string data)
|
||||
{
|
||||
return ComputeSha256(Encoding.UTF8.GetBytes(data));
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Types
|
||||
|
||||
private record Subject(string Name, string Digest);
|
||||
|
||||
private record InTotoStatement
|
||||
{
|
||||
public string Type { get; init; } = "https://in-toto.io/Statement/v1";
|
||||
public IReadOnlyList<SubjectDto> Subject { get; init; } = Array.Empty<SubjectDto>();
|
||||
public string PredicateType { get; init; } = "";
|
||||
public object? Predicate { get; init; }
|
||||
}
|
||||
|
||||
private record SubjectDto
|
||||
{
|
||||
public string Name { get; init; } = "";
|
||||
public Dictionary<string, string> Digest { get; init; } = new();
|
||||
}
|
||||
|
||||
private sealed class MockAttestor
|
||||
{
|
||||
public InTotoStatement CreateStatement(
|
||||
string predicateType,
|
||||
IEnumerable<Subject> subjects,
|
||||
object predicate)
|
||||
{
|
||||
return new InTotoStatement
|
||||
{
|
||||
Type = "https://in-toto.io/Statement/v1",
|
||||
Subject = subjects.Select(s => new SubjectDto
|
||||
{
|
||||
Name = s.Name,
|
||||
Digest = new Dictionary<string, string>
|
||||
{
|
||||
["sha256"] = s.Digest.Replace("sha256:", "")
|
||||
}
|
||||
}).ToList(),
|
||||
PredicateType = predicateType,
|
||||
Predicate = JsonSerializer.Deserialize<object>(predicate?.ToString() ?? "{}")
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class DsseEnvelope
|
||||
{
|
||||
public string PayloadType { get; init; } = "";
|
||||
public string Payload { get; init; } = "";
|
||||
public IReadOnlyList<DsseSignature> Signatures { get; init; } = Array.Empty<DsseSignature>();
|
||||
}
|
||||
|
||||
private sealed class DsseSignature
|
||||
{
|
||||
public string KeyId { get; init; } = "";
|
||||
public string Sig { get; init; } = "";
|
||||
}
|
||||
|
||||
private sealed class MockSigner
|
||||
{
|
||||
private readonly Dictionary<string, byte[]> _keys = new();
|
||||
|
||||
public Task<DsseEnvelope> SignAsync(string payload, string payloadType)
|
||||
{
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(payload);
|
||||
var keyId = Guid.NewGuid().ToString();
|
||||
|
||||
// Create deterministic "signature" (HMAC-like for testing)
|
||||
var key = RandomNumberGenerator.GetBytes(32);
|
||||
_keys[keyId] = key;
|
||||
|
||||
var pae = CreatePae(payloadType, payloadBytes);
|
||||
var sig = HMACSHA256.HashData(key, pae);
|
||||
|
||||
return Task.FromResult(new DsseEnvelope
|
||||
{
|
||||
PayloadType = payloadType,
|
||||
Payload = Convert.ToBase64String(payloadBytes),
|
||||
Signatures = new[]
|
||||
{
|
||||
new DsseSignature
|
||||
{
|
||||
KeyId = keyId,
|
||||
Sig = Convert.ToBase64String(sig)
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public Task<bool> VerifyAsync(DsseEnvelope envelope)
|
||||
{
|
||||
if (envelope.Signatures.Count == 0) return Task.FromResult(false);
|
||||
|
||||
var sig = envelope.Signatures[0];
|
||||
if (!_keys.TryGetValue(sig.KeyId, out var key))
|
||||
{
|
||||
// Unknown key - verification fails
|
||||
return Task.FromResult(false);
|
||||
}
|
||||
|
||||
var payloadBytes = Convert.FromBase64String(envelope.Payload);
|
||||
var pae = CreatePae(envelope.PayloadType, payloadBytes);
|
||||
var expectedSig = HMACSHA256.HashData(key, pae);
|
||||
var actualSig = Convert.FromBase64String(sig.Sig);
|
||||
|
||||
return Task.FromResult(CryptographicOperations.FixedTimeEquals(expectedSig, actualSig));
|
||||
}
|
||||
|
||||
private static byte[] CreatePae(string type, byte[] payload)
|
||||
{
|
||||
// PAE(type, payload) = "DSSEv1" + len(type) + type + len(payload) + payload
|
||||
using var ms = new MemoryStream();
|
||||
using var writer = new BinaryWriter(ms);
|
||||
|
||||
var typeBytes = Encoding.UTF8.GetBytes(type);
|
||||
writer.Write(Encoding.UTF8.GetBytes("DSSEv1 "));
|
||||
writer.Write((long)typeBytes.Length);
|
||||
writer.Write(typeBytes);
|
||||
writer.Write((long)payload.Length);
|
||||
writer.Write(payload);
|
||||
|
||||
return ms.ToArray();
|
||||
}
|
||||
}
|
||||
|
||||
private sealed class MockAttestationStore
|
||||
{
|
||||
private readonly Dictionary<string, DsseEnvelope> _store = new();
|
||||
|
||||
public Task<bool> StoreAsync(DsseEnvelope envelope)
|
||||
{
|
||||
if (envelope.Signatures.Count == 0) return Task.FromResult(false);
|
||||
|
||||
var id = envelope.Signatures[0].KeyId;
|
||||
_store[id] = envelope;
|
||||
return Task.FromResult(true);
|
||||
}
|
||||
|
||||
public Task<DsseEnvelope?> RetrieveAsync(string id)
|
||||
{
|
||||
return Task.FromResult(_store.TryGetValue(id, out var envelope) ? envelope : null);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,596 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorInclusionProofTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
|
||||
// Task: ATTESTOR-5100-008 - Add Rekor transparency log inclusion proof tests: verify inclusion proof for logged attestation
|
||||
// Description: Tests for Rekor Merkle tree inclusion proof verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Attestor.Tests.Rekor;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for Rekor Merkle tree inclusion proof verification.
|
||||
/// Validates:
|
||||
/// - Valid inclusion proofs verify correctly
|
||||
/// - Tampered inclusion proofs fail verification
|
||||
/// - Proof path computation is correct
|
||||
/// - Edge cases (empty tree, single node, etc.) are handled
|
||||
/// </summary>
|
||||
[Trait("Category", "Rekor")]
|
||||
[Trait("Category", "InclusionProof")]
|
||||
[Trait("Category", "MerkleTree")]
|
||||
[Trait("Category", "L0")]
|
||||
public sealed class RekorInclusionProofTests
|
||||
{
|
||||
private readonly ITestOutputHelper _output;
|
||||
|
||||
public RekorInclusionProofTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
}
|
||||
|
||||
#region Basic Inclusion Proof Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_ValidProof_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
var entries = new[] { "entry1", "entry2", "entry3", "entry4" };
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
tree.Append(Encoding.UTF8.GetBytes(entry));
|
||||
}
|
||||
|
||||
// Get proof for entry at index 2
|
||||
var leafData = Encoding.UTF8.GetBytes("entry3");
|
||||
var proof = tree.GetInclusionProof(2);
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: leafData,
|
||||
leafIndex: 2,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeTrue("valid inclusion proof should verify");
|
||||
|
||||
_output.WriteLine($"Tree size: {tree.Size}");
|
||||
_output.WriteLine($"Root hash: {Convert.ToHexString(tree.RootHash).ToLower()}");
|
||||
_output.WriteLine($"Proof path length: {proof.Count}");
|
||||
_output.WriteLine("✓ Inclusion proof verified");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_AllEntries_AllVerify()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
var entries = new[] { "entry0", "entry1", "entry2", "entry3", "entry4", "entry5", "entry6", "entry7" };
|
||||
|
||||
foreach (var entry in entries)
|
||||
{
|
||||
tree.Append(Encoding.UTF8.GetBytes(entry));
|
||||
}
|
||||
|
||||
_output.WriteLine($"Tree with {tree.Size} entries:");
|
||||
_output.WriteLine($"Root hash: {Convert.ToHexString(tree.RootHash).ToLower()}");
|
||||
|
||||
// Act & Assert - verify each entry
|
||||
for (int i = 0; i < entries.Length; i++)
|
||||
{
|
||||
var leafData = Encoding.UTF8.GetBytes(entries[i]);
|
||||
var proof = tree.GetInclusionProof(i);
|
||||
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: leafData,
|
||||
leafIndex: i,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
verified.Should().BeTrue($"entry {i} should verify");
|
||||
_output.WriteLine($" Entry {i}: ✓ (proof path: {proof.Count} nodes)");
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Tampered Proof Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_TamperedLeafData_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
|
||||
var proof = tree.GetInclusionProof(0);
|
||||
|
||||
// Use tampered leaf data
|
||||
var tamperedLeaf = Encoding.UTF8.GetBytes("tampered-entry");
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: tamperedLeaf,
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeFalse("tampered leaf should not verify");
|
||||
_output.WriteLine("✓ Tampered leaf data detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_TamperedProofPath_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry3"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry4"));
|
||||
|
||||
var proof = tree.GetInclusionProof(0).ToList();
|
||||
|
||||
// Tamper with a proof node
|
||||
if (proof.Count > 0)
|
||||
{
|
||||
proof[0] = new byte[32]; // Zero out first proof node
|
||||
}
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("entry1"),
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeFalse("tampered proof path should not verify");
|
||||
_output.WriteLine("✓ Tampered proof path detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_TamperedRootHash_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
|
||||
var proof = tree.GetInclusionProof(0);
|
||||
var tamperedRoot = new byte[32]; // Zero root
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("entry1"),
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tamperedRoot,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeFalse("tampered root hash should not verify");
|
||||
_output.WriteLine("✓ Tampered root hash detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_WrongIndex_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry3"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry4"));
|
||||
|
||||
// Get proof for index 2, but verify at wrong index
|
||||
var proof = tree.GetInclusionProof(2);
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("entry3"),
|
||||
leafIndex: 1, // Wrong index!
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeFalse("wrong index should not verify");
|
||||
_output.WriteLine("✓ Wrong index detected");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Edge Case Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_SingleNodeTree_Verifies()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("only-entry"));
|
||||
|
||||
var proof = tree.GetInclusionProof(0);
|
||||
|
||||
// Act
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("only-entry"),
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
// Assert
|
||||
verified.Should().BeTrue("single node tree should verify");
|
||||
proof.Should().BeEmpty("single node tree needs no proof path");
|
||||
|
||||
_output.WriteLine("✓ Single node tree verified");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_TwoNodeTree_Verifies()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
|
||||
// Verify both entries
|
||||
var proof0 = tree.GetInclusionProof(0);
|
||||
var proof1 = tree.GetInclusionProof(1);
|
||||
|
||||
// Act
|
||||
var verified0 = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("entry1"),
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof0);
|
||||
|
||||
var verified1 = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes("entry2"),
|
||||
leafIndex: 1,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof1);
|
||||
|
||||
// Assert
|
||||
verified0.Should().BeTrue("entry 0 should verify");
|
||||
verified1.Should().BeTrue("entry 1 should verify");
|
||||
|
||||
_output.WriteLine("✓ Two node tree verified");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_LargeTree_Verifies()
|
||||
{
|
||||
// Arrange - create a tree with many entries
|
||||
var tree = new MockMerkleTree();
|
||||
const int entryCount = 128;
|
||||
|
||||
for (int i = 0; i < entryCount; i++)
|
||||
{
|
||||
tree.Append(Encoding.UTF8.GetBytes($"entry-{i}"));
|
||||
}
|
||||
|
||||
// Verify some entries at different positions
|
||||
var indicesToVerify = new[] { 0, 1, 63, 64, 100, 127 };
|
||||
|
||||
_output.WriteLine($"Tree with {entryCount} entries");
|
||||
_output.WriteLine($"Expected proof length: ~{Math.Log2(entryCount)} nodes");
|
||||
|
||||
// Act & Assert
|
||||
foreach (var index in indicesToVerify)
|
||||
{
|
||||
var proof = tree.GetInclusionProof(index);
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes($"entry-{index}"),
|
||||
leafIndex: index,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
verified.Should().BeTrue($"entry {index} should verify");
|
||||
_output.WriteLine($" Entry {index}: ✓ (proof path: {proof.Count} nodes)");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusionProof_NonPowerOfTwoTree_Verifies()
|
||||
{
|
||||
// Arrange - 5 entries (not a power of 2)
|
||||
var tree = new MockMerkleTree();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
tree.Append(Encoding.UTF8.GetBytes($"entry-{i}"));
|
||||
}
|
||||
|
||||
_output.WriteLine($"Non-power-of-two tree: {tree.Size} entries");
|
||||
|
||||
// Act & Assert - verify all entries
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
var proof = tree.GetInclusionProof(i);
|
||||
var verified = tree.VerifyInclusionProof(
|
||||
leafData: Encoding.UTF8.GetBytes($"entry-{i}"),
|
||||
leafIndex: i,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof);
|
||||
|
||||
verified.Should().BeTrue($"entry {i} should verify in non-power-of-two tree");
|
||||
}
|
||||
|
||||
_output.WriteLine("✓ Non-power-of-two tree verified");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Proof Structure Tests
|
||||
|
||||
[Fact]
|
||||
public void GetInclusionProof_ReturnsCorrectPathLength()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
for (int i = 0; i < 16; i++)
|
||||
{
|
||||
tree.Append(Encoding.UTF8.GetBytes($"entry-{i}"));
|
||||
}
|
||||
|
||||
// For a balanced tree of 16 elements, proof length should be log2(16) = 4
|
||||
var expectedPathLength = (int)Math.Ceiling(Math.Log2(16));
|
||||
|
||||
// Act
|
||||
var proof = tree.GetInclusionProof(7);
|
||||
|
||||
// Assert
|
||||
proof.Count.Should().BeLessOrEqualTo(expectedPathLength + 1,
|
||||
"proof path should be approximately log2(n) nodes");
|
||||
|
||||
_output.WriteLine($"Tree size: 16, Proof length: {proof.Count}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InclusionProof_PathNodesAre32Bytes()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry3"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry4"));
|
||||
|
||||
// Act
|
||||
var proof = tree.GetInclusionProof(0);
|
||||
|
||||
// Assert - all nodes should be 32 bytes (SHA-256)
|
||||
proof.Should().AllSatisfy(node =>
|
||||
node.Length.Should().Be(32, "each proof node should be 32 bytes (SHA-256)"));
|
||||
|
||||
_output.WriteLine($"Proof has {proof.Count} nodes, all 32 bytes");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void InclusionProofVerification_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
|
||||
var proof = tree.GetInclusionProof(0);
|
||||
var leafData = Encoding.UTF8.GetBytes("entry1");
|
||||
|
||||
// Act - verify multiple times
|
||||
var results = Enumerable.Range(0, 10)
|
||||
.Select(_ => tree.VerifyInclusionProof(
|
||||
leafData: leafData,
|
||||
leafIndex: 0,
|
||||
treeSize: tree.Size,
|
||||
rootHash: tree.RootHash,
|
||||
proof: proof))
|
||||
.ToList();
|
||||
|
||||
// Assert - all results should be identical
|
||||
results.Should().AllBeEquivalentTo(true);
|
||||
|
||||
_output.WriteLine("✓ Verification is deterministic across 10 runs");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void RootHashComputation_IsDeterministic()
|
||||
{
|
||||
// Arrange & Act
|
||||
var roots = new List<byte[]>();
|
||||
for (int i = 0; i < 5; i++)
|
||||
{
|
||||
var tree = new MockMerkleTree();
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry1"));
|
||||
tree.Append(Encoding.UTF8.GetBytes("entry2"));
|
||||
roots.Add(tree.RootHash);
|
||||
}
|
||||
|
||||
// Assert - all roots should be identical
|
||||
roots.Should().AllBeEquivalentTo(roots[0],
|
||||
"root hash should be deterministic for same inputs");
|
||||
|
||||
_output.WriteLine($"Deterministic root: {Convert.ToHexString(roots[0]).ToLower()}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Merkle Tree Implementation
|
||||
|
||||
/// <summary>
|
||||
/// Simplified Merkle tree implementation for testing.
|
||||
/// Uses RFC 6962 conventions (0x00 prefix for leaf, 0x01 for inner node).
|
||||
/// </summary>
|
||||
private sealed class MockMerkleTree
|
||||
{
|
||||
private readonly List<byte[]> _leaves = new();
|
||||
private byte[]? _rootHash;
|
||||
|
||||
public int Size => _leaves.Count;
|
||||
public byte[] RootHash => _rootHash ?? ComputeRootHash();
|
||||
|
||||
public void Append(byte[] data)
|
||||
{
|
||||
var leafHash = HashLeaf(data);
|
||||
_leaves.Add(leafHash);
|
||||
_rootHash = null; // Invalidate cached root
|
||||
}
|
||||
|
||||
public IReadOnlyList<byte[]> GetInclusionProof(int index)
|
||||
{
|
||||
if (index < 0 || index >= _leaves.Count)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(index));
|
||||
}
|
||||
|
||||
if (_leaves.Count == 1)
|
||||
{
|
||||
return Array.Empty<byte[]>();
|
||||
}
|
||||
|
||||
var proof = new List<byte[]>();
|
||||
ComputeProof(_leaves.ToArray(), index, proof);
|
||||
return proof;
|
||||
}
|
||||
|
||||
public bool VerifyInclusionProof(
|
||||
byte[] leafData,
|
||||
int leafIndex,
|
||||
int treeSize,
|
||||
byte[] rootHash,
|
||||
IReadOnlyList<byte[]> proof)
|
||||
{
|
||||
var leafHash = HashLeaf(leafData);
|
||||
var computedRoot = RecomputeRoot(leafHash, leafIndex, treeSize, proof);
|
||||
return computedRoot.SequenceEqual(rootHash);
|
||||
}
|
||||
|
||||
private byte[] ComputeRootHash()
|
||||
{
|
||||
if (_leaves.Count == 0)
|
||||
{
|
||||
return SHA256.HashData(Array.Empty<byte>());
|
||||
}
|
||||
|
||||
var nodes = _leaves.ToList();
|
||||
|
||||
while (nodes.Count > 1)
|
||||
{
|
||||
var nextLevel = new List<byte[]>();
|
||||
|
||||
for (int i = 0; i < nodes.Count; i += 2)
|
||||
{
|
||||
if (i + 1 < nodes.Count)
|
||||
{
|
||||
nextLevel.Add(HashInner(nodes[i], nodes[i + 1]));
|
||||
}
|
||||
else
|
||||
{
|
||||
nextLevel.Add(nodes[i]); // Odd node promoted
|
||||
}
|
||||
}
|
||||
|
||||
nodes = nextLevel;
|
||||
}
|
||||
|
||||
return nodes[0];
|
||||
}
|
||||
|
||||
private void ComputeProof(byte[][] leaves, int index, List<byte[]> proof)
|
||||
{
|
||||
if (leaves.Length <= 1)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var nextLevel = new List<byte[]>();
|
||||
var siblingIndex = (index % 2 == 0) ? index + 1 : index - 1;
|
||||
|
||||
if (siblingIndex < leaves.Length)
|
||||
{
|
||||
proof.Add(leaves[siblingIndex]);
|
||||
}
|
||||
|
||||
for (int i = 0; i < leaves.Length; i += 2)
|
||||
{
|
||||
if (i + 1 < leaves.Length)
|
||||
{
|
||||
nextLevel.Add(HashInner(leaves[i], leaves[i + 1]));
|
||||
}
|
||||
else
|
||||
{
|
||||
nextLevel.Add(leaves[i]);
|
||||
}
|
||||
}
|
||||
|
||||
if (nextLevel.Count > 1)
|
||||
{
|
||||
ComputeProof(nextLevel.ToArray(), index / 2, proof);
|
||||
}
|
||||
}
|
||||
|
||||
private static byte[] RecomputeRoot(byte[] leafHash, int index, int treeSize, IReadOnlyList<byte[]> proof)
|
||||
{
|
||||
var current = leafHash;
|
||||
var currentIndex = index;
|
||||
|
||||
foreach (var sibling in proof)
|
||||
{
|
||||
if (currentIndex % 2 == 0)
|
||||
{
|
||||
current = HashInner(current, sibling);
|
||||
}
|
||||
else
|
||||
{
|
||||
current = HashInner(sibling, current);
|
||||
}
|
||||
|
||||
currentIndex /= 2;
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
private static byte[] HashLeaf(byte[] data)
|
||||
{
|
||||
var prefixed = new byte[data.Length + 1];
|
||||
prefixed[0] = 0x00; // Leaf prefix per RFC 6962
|
||||
Array.Copy(data, 0, prefixed, 1, data.Length);
|
||||
return SHA256.HashData(prefixed);
|
||||
}
|
||||
|
||||
private static byte[] HashInner(byte[] left, byte[] right)
|
||||
{
|
||||
var combined = new byte[left.Length + right.Length + 1];
|
||||
combined[0] = 0x01; // Inner node prefix per RFC 6962
|
||||
Array.Copy(left, 0, combined, 1, left.Length);
|
||||
Array.Copy(right, 0, combined, 1 + left.Length, right.Length);
|
||||
return SHA256.HashData(combined);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,551 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorReceiptGenerationTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
|
||||
// Task: ATTESTOR-5100-006 - Add Rekor receipt generation tests: attestation → Rekor entry → receipt returned
|
||||
// Description: Tests for Rekor transparency log receipt generation
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Attestor.Tests.Rekor;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for Rekor receipt generation workflow.
|
||||
/// Validates:
|
||||
/// - Attestation can be submitted to Rekor and receipt is returned
|
||||
/// - Receipt contains required fields (UUID, index, log URL, integrated time)
|
||||
/// - Proof structure is valid (checkpoint, inclusion proof)
|
||||
/// - Error handling for submission failures
|
||||
/// </summary>
|
||||
[Trait("Category", "Rekor")]
|
||||
[Trait("Category", "ReceiptGeneration")]
|
||||
[Trait("Category", "L0")]
|
||||
public sealed class RekorReceiptGenerationTests
|
||||
{
|
||||
private readonly ITestOutputHelper _output;
|
||||
|
||||
public RekorReceiptGenerationTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
}
|
||||
|
||||
#region Receipt Generation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_ValidDsseEnvelope_ReturnsReceipt()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
response.Should().NotBeNull();
|
||||
response.Uuid.Should().NotBeNullOrEmpty("UUID should be assigned");
|
||||
response.Status.Should().Be("included", "entry should be included in log");
|
||||
response.Index.Should().BeGreaterOrEqualTo(0, "index should be assigned");
|
||||
|
||||
_output.WriteLine($"✓ Receipt generated:");
|
||||
_output.WriteLine($" UUID: {response.Uuid}");
|
||||
_output.WriteLine($" Index: {response.Index}");
|
||||
_output.WriteLine($" Status: {response.Status}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_ReturnsLogUrl()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
response.LogUrl.Should().NotBeNullOrEmpty("log URL should be provided");
|
||||
response.LogUrl.Should().StartWith("https://", "log URL should be HTTPS");
|
||||
|
||||
_output.WriteLine($"Log URL: {response.LogUrl}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_ReturnsIntegratedTime()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
var beforeSubmit = DateTimeOffset.UtcNow;
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
response.IntegratedTime.Should().NotBeNull("integrated time should be set");
|
||||
response.IntegratedTimeUtc.Should().NotBeNull();
|
||||
response.IntegratedTimeUtc!.Value.Should().BeOnOrAfter(beforeSubmit.AddMinutes(-5),
|
||||
"integrated time should be recent (allowing for clock skew)");
|
||||
|
||||
_output.WriteLine($"Integrated time: {response.IntegratedTimeUtc:O}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_ReturnsProofWithCheckpoint()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
response.Proof.Should().NotBeNull("proof should be included");
|
||||
response.Proof!.Checkpoint.Should().NotBeNull("checkpoint should be present");
|
||||
response.Proof.Checkpoint!.Origin.Should().NotBeNullOrEmpty("checkpoint origin should be set");
|
||||
response.Proof.Checkpoint.Size.Should().BeGreaterThan(0, "checkpoint size should be positive");
|
||||
response.Proof.Checkpoint.RootHash.Should().NotBeNullOrEmpty("root hash should be present");
|
||||
|
||||
_output.WriteLine($"Checkpoint:");
|
||||
_output.WriteLine($" Origin: {response.Proof.Checkpoint.Origin}");
|
||||
_output.WriteLine($" Size: {response.Proof.Checkpoint.Size}");
|
||||
_output.WriteLine($" Root hash: {response.Proof.Checkpoint.RootHash}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_ReturnsInclusionProof()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
response.Proof.Should().NotBeNull();
|
||||
response.Proof!.Inclusion.Should().NotBeNull("inclusion proof should be present");
|
||||
response.Proof.Inclusion!.LeafHash.Should().NotBeNullOrEmpty("leaf hash should be present");
|
||||
response.Proof.Inclusion.Path.Should().NotBeEmpty("inclusion path should have elements");
|
||||
|
||||
_output.WriteLine($"Inclusion proof:");
|
||||
_output.WriteLine($" Leaf hash: {response.Proof.Inclusion.LeafHash}");
|
||||
_output.WriteLine($" Path length: {response.Proof.Inclusion.Path.Count}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region UUID Format Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_UuidFormat_IsValid()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert - Rekor UUIDs are typically 64 hex characters
|
||||
response.Uuid.Should().MatchRegex("^[a-f0-9]{64}$",
|
||||
"UUID should be 64 hex characters");
|
||||
|
||||
_output.WriteLine($"UUID format validated: {response.Uuid}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_DifferentAttestations_GetDifferentUuids()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation1 = CreateValidDsseEnvelope("subject1");
|
||||
var attestation2 = CreateValidDsseEnvelope("subject2");
|
||||
|
||||
// Act
|
||||
var response1 = await client.SubmitAsync(attestation1);
|
||||
var response2 = await client.SubmitAsync(attestation2);
|
||||
|
||||
// Assert
|
||||
response1.Uuid.Should().NotBe(response2.Uuid,
|
||||
"different attestations should get different UUIDs");
|
||||
|
||||
_output.WriteLine($"UUID 1: {response1.Uuid}");
|
||||
_output.WriteLine($"UUID 2: {response2.Uuid}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Idempotency Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_SameAttestation_ReturnsSameUuid()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient { EnableIdempotency = true };
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response1 = await client.SubmitAsync(attestation);
|
||||
var response2 = await client.SubmitAsync(attestation);
|
||||
|
||||
// Assert - submitting the same attestation should return the same entry
|
||||
response1.Uuid.Should().Be(response2.Uuid,
|
||||
"resubmitting same attestation should return same UUID");
|
||||
response1.Index.Should().Be(response2.Index,
|
||||
"index should be the same for duplicate submissions");
|
||||
|
||||
_output.WriteLine($"Idempotent submission verified: {response1.Uuid}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Error Handling Tests
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_InvalidEnvelope_ReturnsError()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var invalidAttestation = new DsseEnvelope
|
||||
{
|
||||
PayloadType = "", // Invalid - empty
|
||||
Payload = "" // Invalid - empty
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await client.TrySubmitAsync(invalidAttestation);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("REKOR_INVALID_ENTRY");
|
||||
result.ErrorMessage.Should().Contain("invalid");
|
||||
|
||||
_output.WriteLine($"Error handled: {result.ErrorCode} - {result.ErrorMessage}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_RekorUnavailable_ReturnsConnectionError()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient { SimulateUnavailable = true };
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var result = await client.TrySubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("REKOR_UNAVAILABLE");
|
||||
result.ErrorMessage.Should().Contain("unavailable");
|
||||
|
||||
_output.WriteLine($"Unavailable handled: {result.ErrorMessage}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SubmitAttestation_Timeout_ReturnsTimeoutError()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient { SimulateTimeout = true };
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var result = await client.TrySubmitAsync(attestation);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("REKOR_TIMEOUT");
|
||||
|
||||
_output.WriteLine($"Timeout handled: {result.ErrorMessage}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Backend Configuration Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("rekor.sigstore.dev", "https://rekor.sigstore.dev")]
|
||||
[InlineData("rekor.example.com", "https://rekor.example.com")]
|
||||
public async Task SubmitAttestation_DifferentBackends_UsesCorrectUrl(string backend, string expectedBaseUrl)
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitToBackendAsync(attestation, backend);
|
||||
|
||||
// Assert
|
||||
response.LogUrl.Should().StartWith(expectedBaseUrl);
|
||||
|
||||
_output.WriteLine($"Backend {backend} → {response.LogUrl}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Receipt Serialization Tests
|
||||
|
||||
[Fact]
|
||||
public async Task Receipt_SerializesToValidJson()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var response = await client.SubmitAsync(attestation);
|
||||
var json = JsonSerializer.Serialize(response, new JsonSerializerOptions { WriteIndented = true });
|
||||
|
||||
// Assert
|
||||
json.Should().NotBeNullOrEmpty();
|
||||
|
||||
Action parseJson = () => JsonDocument.Parse(json);
|
||||
parseJson.Should().NotThrow("receipt should serialize to valid JSON");
|
||||
|
||||
_output.WriteLine($"Receipt JSON:\n{json}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Receipt_RoundtripsSerializationCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var client = new MockRekorClient();
|
||||
var attestation = CreateValidDsseEnvelope();
|
||||
|
||||
// Act
|
||||
var original = await client.SubmitAsync(attestation);
|
||||
var json = JsonSerializer.Serialize(original);
|
||||
var deserialized = JsonSerializer.Deserialize<RekorSubmissionResponse>(json);
|
||||
|
||||
// Assert
|
||||
deserialized.Should().NotBeNull();
|
||||
deserialized!.Uuid.Should().Be(original.Uuid);
|
||||
deserialized.Index.Should().Be(original.Index);
|
||||
deserialized.Status.Should().Be(original.Status);
|
||||
deserialized.IntegratedTime.Should().Be(original.IntegratedTime);
|
||||
|
||||
_output.WriteLine("✓ Receipt serialization roundtrips correctly");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Classes
|
||||
|
||||
private static DsseEnvelope CreateValidDsseEnvelope(string subjectName = "pkg:npm/example@1.0.0")
|
||||
{
|
||||
var statement = new
|
||||
{
|
||||
_type = "https://in-toto.io/Statement/v0.1",
|
||||
subject = new[]
|
||||
{
|
||||
new
|
||||
{
|
||||
name = subjectName,
|
||||
digest = new Dictionary<string, string>
|
||||
{
|
||||
["sha256"] = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(subjectName))).ToLower()
|
||||
}
|
||||
}
|
||||
},
|
||||
predicateType = "https://slsa.dev/provenance/v1",
|
||||
predicate = new { buildType = "test" }
|
||||
};
|
||||
|
||||
var payloadBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(statement));
|
||||
var payloadBase64 = Convert.ToBase64String(payloadBytes);
|
||||
|
||||
// Simulate signature
|
||||
var signatureBytes = SHA256.HashData(payloadBytes);
|
||||
|
||||
return new DsseEnvelope
|
||||
{
|
||||
PayloadType = "application/vnd.in-toto+json",
|
||||
Payload = payloadBase64,
|
||||
Signatures = new List<DsseSignature>
|
||||
{
|
||||
new()
|
||||
{
|
||||
KeyId = "test-key-id",
|
||||
Sig = Convert.ToBase64String(signatureBytes)
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Types
|
||||
|
||||
private sealed class DsseEnvelope
|
||||
{
|
||||
public string PayloadType { get; set; } = "";
|
||||
public string Payload { get; set; } = "";
|
||||
public List<DsseSignature> Signatures { get; set; } = new();
|
||||
}
|
||||
|
||||
private sealed class DsseSignature
|
||||
{
|
||||
public string KeyId { get; set; } = "";
|
||||
public string Sig { get; set; } = "";
|
||||
}
|
||||
|
||||
private sealed class RekorSubmissionResponse
|
||||
{
|
||||
public string Uuid { get; set; } = "";
|
||||
public long? Index { get; set; }
|
||||
public string? LogUrl { get; set; }
|
||||
public string Status { get; set; } = "included";
|
||||
public RekorProofResponse? Proof { get; set; }
|
||||
public long? IntegratedTime { get; set; }
|
||||
|
||||
public DateTimeOffset? IntegratedTimeUtc =>
|
||||
IntegratedTime.HasValue
|
||||
? DateTimeOffset.FromUnixTimeSeconds(IntegratedTime.Value)
|
||||
: null;
|
||||
}
|
||||
|
||||
private sealed class RekorProofResponse
|
||||
{
|
||||
public RekorCheckpoint? Checkpoint { get; set; }
|
||||
public RekorInclusionProof? Inclusion { get; set; }
|
||||
}
|
||||
|
||||
private sealed class RekorCheckpoint
|
||||
{
|
||||
public string? Origin { get; set; }
|
||||
public long Size { get; set; }
|
||||
public string? RootHash { get; set; }
|
||||
public DateTimeOffset? Timestamp { get; set; }
|
||||
}
|
||||
|
||||
private sealed class RekorInclusionProof
|
||||
{
|
||||
public string? LeafHash { get; set; }
|
||||
public IReadOnlyList<string> Path { get; set; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
private record SubmissionResult(
|
||||
bool Success,
|
||||
RekorSubmissionResponse? Response = null,
|
||||
string ErrorCode = "",
|
||||
string ErrorMessage = "");
|
||||
|
||||
private sealed class MockRekorClient
|
||||
{
|
||||
private long _nextIndex = 1000;
|
||||
private readonly Dictionary<string, RekorSubmissionResponse> _entries = new();
|
||||
|
||||
public bool EnableIdempotency { get; set; } = false;
|
||||
public bool SimulateUnavailable { get; set; } = false;
|
||||
public bool SimulateTimeout { get; set; } = false;
|
||||
|
||||
public Task<RekorSubmissionResponse> SubmitAsync(DsseEnvelope envelope)
|
||||
{
|
||||
var result = TrySubmitAsync(envelope).Result;
|
||||
if (!result.Success)
|
||||
{
|
||||
throw new InvalidOperationException(result.ErrorMessage);
|
||||
}
|
||||
return Task.FromResult(result.Response!);
|
||||
}
|
||||
|
||||
public Task<RekorSubmissionResponse> SubmitToBackendAsync(DsseEnvelope envelope, string backend)
|
||||
{
|
||||
var response = CreateResponse(envelope);
|
||||
response.LogUrl = $"https://{backend}/api/v1/log/entries/{response.Uuid}";
|
||||
return Task.FromResult(response);
|
||||
}
|
||||
|
||||
public Task<SubmissionResult> TrySubmitAsync(DsseEnvelope envelope)
|
||||
{
|
||||
if (SimulateUnavailable)
|
||||
{
|
||||
return Task.FromResult(new SubmissionResult(false,
|
||||
ErrorCode: "REKOR_UNAVAILABLE",
|
||||
ErrorMessage: "Rekor transparency log unavailable"));
|
||||
}
|
||||
|
||||
if (SimulateTimeout)
|
||||
{
|
||||
return Task.FromResult(new SubmissionResult(false,
|
||||
ErrorCode: "REKOR_TIMEOUT",
|
||||
ErrorMessage: "Request to Rekor timed out"));
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(envelope.PayloadType) || string.IsNullOrEmpty(envelope.Payload))
|
||||
{
|
||||
return Task.FromResult(new SubmissionResult(false,
|
||||
ErrorCode: "REKOR_INVALID_ENTRY",
|
||||
ErrorMessage: "Invalid DSSE envelope: payload type and payload are required"));
|
||||
}
|
||||
|
||||
var response = CreateResponse(envelope);
|
||||
return Task.FromResult(new SubmissionResult(true, response));
|
||||
}
|
||||
|
||||
private RekorSubmissionResponse CreateResponse(DsseEnvelope envelope)
|
||||
{
|
||||
// Generate UUID from payload hash for idempotency
|
||||
var payloadBytes = Convert.FromBase64String(envelope.Payload);
|
||||
var hash = SHA256.HashData(payloadBytes);
|
||||
var uuid = Convert.ToHexString(hash).ToLower();
|
||||
|
||||
// Check for existing entry (idempotency)
|
||||
if (EnableIdempotency && _entries.TryGetValue(uuid, out var existing))
|
||||
{
|
||||
return existing;
|
||||
}
|
||||
|
||||
var index = _nextIndex++;
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
// Generate merkle tree components
|
||||
var leafHash = SHA256.HashData(payloadBytes);
|
||||
var rootHash = SHA256.HashData(leafHash); // Simplified for testing
|
||||
|
||||
var response = new RekorSubmissionResponse
|
||||
{
|
||||
Uuid = uuid,
|
||||
Index = index,
|
||||
LogUrl = $"https://rekor.sigstore.dev/api/v1/log/entries/{uuid}",
|
||||
Status = "included",
|
||||
IntegratedTime = now.ToUnixTimeSeconds(),
|
||||
Proof = new RekorProofResponse
|
||||
{
|
||||
Checkpoint = new RekorCheckpoint
|
||||
{
|
||||
Origin = "rekor.sigstore.dev - 2605736670972794746",
|
||||
Size = index + 1,
|
||||
RootHash = Convert.ToHexString(rootHash).ToLower(),
|
||||
Timestamp = now
|
||||
},
|
||||
Inclusion = new RekorInclusionProof
|
||||
{
|
||||
LeafHash = Convert.ToHexString(leafHash).ToLower(),
|
||||
Path = new[]
|
||||
{
|
||||
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"node-{index}-1"))).ToLower(),
|
||||
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"node-{index}-2"))).ToLower(),
|
||||
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"node-{index}-3"))).ToLower()
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (EnableIdempotency)
|
||||
{
|
||||
_entries[uuid] = response;
|
||||
}
|
||||
|
||||
return response;
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,642 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorReceiptVerificationTests.cs
|
||||
// Sprint: SPRINT_5100_0009_0007 - Attestor Module Test Implementation
|
||||
// Task: ATTESTOR-5100-007 - Add Rekor receipt verification tests: valid receipt → verification succeeds; invalid receipt → fails
|
||||
// Description: Tests for Rekor transparency log receipt verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using Xunit.Abstractions;
|
||||
|
||||
namespace StellaOps.Attestor.Tests.Rekor;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for Rekor receipt verification workflow.
|
||||
/// Validates:
|
||||
/// - Valid receipts verify successfully
|
||||
/// - Invalid/tampered receipts fail verification
|
||||
/// - Verification checks all required fields
|
||||
/// - Error codes are deterministic
|
||||
/// </summary>
|
||||
[Trait("Category", "Rekor")]
|
||||
[Trait("Category", "ReceiptVerification")]
|
||||
[Trait("Category", "L0")]
|
||||
public sealed class RekorReceiptVerificationTests
|
||||
{
|
||||
private readonly ITestOutputHelper _output;
|
||||
|
||||
public RekorReceiptVerificationTests(ITestOutputHelper output)
|
||||
{
|
||||
_output = output;
|
||||
}
|
||||
|
||||
#region Valid Receipt Verification Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_ValidReceipt_ReturnsSuccess()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue("valid receipt should verify");
|
||||
result.ErrorCode.Should().BeNullOrEmpty();
|
||||
|
||||
_output.WriteLine("✓ Valid receipt verified successfully");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_ValidReceipt_ReturnsVerificationDetails()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.LogIndex.Should().Be(receipt.Index);
|
||||
result.Uuid.Should().Be(receipt.Uuid);
|
||||
result.IntegratedTime.Should().NotBeNull();
|
||||
|
||||
_output.WriteLine($"Verified entry:");
|
||||
_output.WriteLine($" Index: {result.LogIndex}");
|
||||
_output.WriteLine($" UUID: {result.Uuid}");
|
||||
_output.WriteLine($" Integrated: {result.IntegratedTime}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Invalid Receipt Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_MissingUuid_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Uuid = ""; // Invalid
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_MISSING_UUID");
|
||||
|
||||
_output.WriteLine($"✓ Missing UUID detected: {result.ErrorCode}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_MissingIndex_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Index = null; // Invalid
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_MISSING_INDEX");
|
||||
|
||||
_output.WriteLine($"✓ Missing index detected: {result.ErrorCode}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_MissingProof_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Proof = null; // Invalid
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_MISSING_PROOF");
|
||||
|
||||
_output.WriteLine($"✓ Missing proof detected: {result.ErrorCode}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_MissingCheckpoint_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Proof!.Checkpoint = null; // Invalid
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_MISSING_CHECKPOINT");
|
||||
|
||||
_output.WriteLine($"✓ Missing checkpoint detected: {result.ErrorCode}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_MissingInclusionProof_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Proof!.Inclusion = null; // Invalid
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_MISSING_INCLUSION");
|
||||
|
||||
_output.WriteLine($"✓ Missing inclusion proof detected: {result.ErrorCode}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Tampered Receipt Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_TamperedRootHash_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
var originalHash = receipt.Proof!.Checkpoint!.RootHash;
|
||||
|
||||
// Tamper with root hash
|
||||
receipt.Proof.Checkpoint.RootHash = "0000000000000000000000000000000000000000000000000000000000000000";
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_INVALID_ROOT_HASH");
|
||||
|
||||
_output.WriteLine($"✓ Tampered root hash detected");
|
||||
_output.WriteLine($" Original: {originalHash}");
|
||||
_output.WriteLine($" Tampered: {receipt.Proof.Checkpoint.RootHash}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_TamperedLeafHash_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Tamper with leaf hash
|
||||
receipt.Proof!.Inclusion!.LeafHash = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff";
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_INVALID_LEAF_HASH");
|
||||
|
||||
_output.WriteLine($"✓ Tampered leaf hash detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_TamperedInclusionPath_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Tamper with inclusion path
|
||||
receipt.Proof!.Inclusion!.Path = new[] { "0000000000000000000000000000000000000000000000000000000000000000" };
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_INVALID_INCLUSION_PATH");
|
||||
|
||||
_output.WriteLine($"✓ Tampered inclusion path detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_TamperedIndex_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
var originalIndex = receipt.Index;
|
||||
|
||||
// Tamper with index
|
||||
receipt.Index = 999999;
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_INDEX_MISMATCH");
|
||||
|
||||
_output.WriteLine($"✓ Tampered index detected: {originalIndex} → {receipt.Index}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Time Validation Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_FutureIntegratedTime_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier { AllowedTimeSkew = TimeSpan.FromMinutes(5) };
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Set integrated time to far in the future
|
||||
receipt.IntegratedTime = DateTimeOffset.UtcNow.AddDays(1).ToUnixTimeSeconds();
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be("RECEIPT_TIME_SKEW");
|
||||
|
||||
_output.WriteLine($"✓ Future integrated time detected");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_SlightTimeSkew_StillValid()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier { AllowedTimeSkew = TimeSpan.FromMinutes(5) };
|
||||
var receipt = CreateValidReceipt();
|
||||
|
||||
// Set integrated time slightly in the future (within tolerance)
|
||||
receipt.IntegratedTime = DateTimeOffset.UtcNow.AddMinutes(2).ToUnixTimeSeconds();
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert - should still be valid within tolerance
|
||||
result.Success.Should().BeTrue("slight time skew should be allowed");
|
||||
|
||||
_output.WriteLine("✓ Slight time skew allowed within tolerance");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Deterministic Error Code Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("", "RECEIPT_MISSING_UUID")]
|
||||
[InlineData("invalid", "RECEIPT_INVALID_UUID_FORMAT")]
|
||||
public void VerifyReceipt_InvalidUuid_ReturnsDeterministicError(string uuid, string expectedError)
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Uuid = uuid;
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse();
|
||||
result.ErrorCode.Should().Be(expectedError);
|
||||
|
||||
_output.WriteLine($"UUID '{uuid}' → {expectedError}");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_ErrorCodeIsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Proof!.Checkpoint!.RootHash = "tampered";
|
||||
|
||||
// Act - verify multiple times
|
||||
var results = Enumerable.Range(0, 5)
|
||||
.Select(_ => verifier.Verify(receipt))
|
||||
.ToList();
|
||||
|
||||
// Assert - all error codes should be identical
|
||||
results.Should().AllSatisfy(r =>
|
||||
{
|
||||
r.Success.Should().BeFalse();
|
||||
r.ErrorCode.Should().Be(results[0].ErrorCode);
|
||||
});
|
||||
|
||||
_output.WriteLine($"Deterministic error code: {results[0].ErrorCode}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Payload Verification Tests
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_WithPayload_VerifiesPayloadHash()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var payload = Encoding.UTF8.GetBytes("{\"test\":\"payload\"}");
|
||||
var receipt = CreateValidReceiptForPayload(payload);
|
||||
|
||||
// Act
|
||||
var result = verifier.VerifyWithPayload(receipt, payload);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue("payload hash should match");
|
||||
|
||||
_output.WriteLine("✓ Payload hash verified");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyReceipt_WithWrongPayload_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var originalPayload = Encoding.UTF8.GetBytes("{\"test\":\"payload\"}");
|
||||
var tamperedPayload = Encoding.UTF8.GetBytes("{\"test\":\"tampered\"}");
|
||||
var receipt = CreateValidReceiptForPayload(originalPayload);
|
||||
|
||||
// Act
|
||||
var result = verifier.VerifyWithPayload(receipt, tamperedPayload);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeFalse("tampered payload should not match");
|
||||
result.ErrorCode.Should().Be("RECEIPT_PAYLOAD_MISMATCH");
|
||||
|
||||
_output.WriteLine("✓ Tampered payload detected");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Status Verification Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData("included", true)]
|
||||
[InlineData("pending", false)]
|
||||
[InlineData("rejected", false)]
|
||||
[InlineData("", false)]
|
||||
public void VerifyReceipt_Status_ValidatesCorrectly(string status, bool expectedValid)
|
||||
{
|
||||
// Arrange
|
||||
var verifier = new MockReceiptVerifier();
|
||||
var receipt = CreateValidReceipt();
|
||||
receipt.Status = status;
|
||||
|
||||
// Act
|
||||
var result = verifier.Verify(receipt);
|
||||
|
||||
// Assert
|
||||
if (expectedValid)
|
||||
{
|
||||
result.Success.Should().BeTrue($"status '{status}' should be valid");
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Success.Should().BeFalse($"status '{status}' should be invalid");
|
||||
result.ErrorCode.Should().Be("RECEIPT_NOT_INCLUDED");
|
||||
}
|
||||
|
||||
_output.WriteLine($"Status '{status}': {(expectedValid ? "valid" : "invalid")}");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static RekorReceipt CreateValidReceipt()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var index = 12345L;
|
||||
|
||||
// Create deterministic hashes
|
||||
var leafData = Encoding.UTF8.GetBytes($"leaf-{index}");
|
||||
var leafHash = SHA256.HashData(leafData);
|
||||
var rootHash = SHA256.HashData(leafHash);
|
||||
|
||||
return new RekorReceipt
|
||||
{
|
||||
Uuid = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"entry-{index}"))).ToLower(),
|
||||
Index = index,
|
||||
LogUrl = "https://rekor.sigstore.dev/api/v1/log/entries/abc123",
|
||||
Status = "included",
|
||||
IntegratedTime = now.ToUnixTimeSeconds(),
|
||||
Proof = new RekorProof
|
||||
{
|
||||
Checkpoint = new RekorCheckpoint
|
||||
{
|
||||
Origin = "rekor.sigstore.dev - 2605736670972794746",
|
||||
Size = index + 1,
|
||||
RootHash = Convert.ToHexString(rootHash).ToLower(),
|
||||
Timestamp = now
|
||||
},
|
||||
Inclusion = new RekorInclusionProof
|
||||
{
|
||||
LeafHash = Convert.ToHexString(leafHash).ToLower(),
|
||||
Path = new[]
|
||||
{
|
||||
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"sibling-{index}-1"))).ToLower(),
|
||||
Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"sibling-{index}-2"))).ToLower()
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static RekorReceipt CreateValidReceiptForPayload(byte[] payload)
|
||||
{
|
||||
var receipt = CreateValidReceipt();
|
||||
var payloadHash = SHA256.HashData(payload);
|
||||
receipt.Proof!.Inclusion!.LeafHash = Convert.ToHexString(payloadHash).ToLower();
|
||||
receipt.Proof.Checkpoint!.RootHash = Convert.ToHexString(SHA256.HashData(payloadHash)).ToLower();
|
||||
return receipt;
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Types
|
||||
|
||||
private sealed class RekorReceipt
|
||||
{
|
||||
public string Uuid { get; set; } = "";
|
||||
public long? Index { get; set; }
|
||||
public string? LogUrl { get; set; }
|
||||
public string Status { get; set; } = "included";
|
||||
public long? IntegratedTime { get; set; }
|
||||
public RekorProof? Proof { get; set; }
|
||||
}
|
||||
|
||||
private sealed class RekorProof
|
||||
{
|
||||
public RekorCheckpoint? Checkpoint { get; set; }
|
||||
public RekorInclusionProof? Inclusion { get; set; }
|
||||
}
|
||||
|
||||
private sealed class RekorCheckpoint
|
||||
{
|
||||
public string? Origin { get; set; }
|
||||
public long Size { get; set; }
|
||||
public string? RootHash { get; set; }
|
||||
public DateTimeOffset? Timestamp { get; set; }
|
||||
}
|
||||
|
||||
private sealed class RekorInclusionProof
|
||||
{
|
||||
public string? LeafHash { get; set; }
|
||||
public IReadOnlyList<string> Path { get; set; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
private sealed record VerificationResult(
|
||||
bool Success,
|
||||
string? ErrorCode = null,
|
||||
string? ErrorMessage = null,
|
||||
long? LogIndex = null,
|
||||
string? Uuid = null,
|
||||
DateTimeOffset? IntegratedTime = null);
|
||||
|
||||
private sealed class MockReceiptVerifier
|
||||
{
|
||||
public TimeSpan AllowedTimeSkew { get; set; } = TimeSpan.FromMinutes(5);
|
||||
|
||||
public VerificationResult Verify(RekorReceipt receipt)
|
||||
{
|
||||
// Check UUID
|
||||
if (string.IsNullOrEmpty(receipt.Uuid))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_MISSING_UUID", "Receipt UUID is required");
|
||||
}
|
||||
|
||||
if (!IsValidUuidFormat(receipt.Uuid))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_INVALID_UUID_FORMAT", "Receipt UUID format is invalid");
|
||||
}
|
||||
|
||||
// Check index
|
||||
if (!receipt.Index.HasValue)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_MISSING_INDEX", "Receipt index is required");
|
||||
}
|
||||
|
||||
// Check status
|
||||
if (receipt.Status != "included")
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_NOT_INCLUDED", $"Receipt status is '{receipt.Status}', expected 'included'");
|
||||
}
|
||||
|
||||
// Check proof structure
|
||||
if (receipt.Proof == null)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_MISSING_PROOF", "Receipt proof is required");
|
||||
}
|
||||
|
||||
if (receipt.Proof.Checkpoint == null)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_MISSING_CHECKPOINT", "Receipt checkpoint is required");
|
||||
}
|
||||
|
||||
if (receipt.Proof.Inclusion == null)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_MISSING_INCLUSION", "Receipt inclusion proof is required");
|
||||
}
|
||||
|
||||
// Verify checkpoint hash format
|
||||
if (!IsValidHashFormat(receipt.Proof.Checkpoint.RootHash))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_INVALID_ROOT_HASH", "Root hash format is invalid");
|
||||
}
|
||||
|
||||
// Verify leaf hash format
|
||||
if (!IsValidHashFormat(receipt.Proof.Inclusion.LeafHash))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_INVALID_LEAF_HASH", "Leaf hash format is invalid");
|
||||
}
|
||||
|
||||
// Verify inclusion path
|
||||
if (!receipt.Proof.Inclusion.Path.All(IsValidHashFormat))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_INVALID_INCLUSION_PATH", "Inclusion path contains invalid hashes");
|
||||
}
|
||||
|
||||
// Verify index matches checkpoint size
|
||||
if (receipt.Index >= receipt.Proof.Checkpoint.Size)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_INDEX_MISMATCH", "Index is inconsistent with checkpoint size");
|
||||
}
|
||||
|
||||
// Verify time is not too far in the future
|
||||
if (receipt.IntegratedTime.HasValue)
|
||||
{
|
||||
var integratedTime = DateTimeOffset.FromUnixTimeSeconds(receipt.IntegratedTime.Value);
|
||||
if (integratedTime > DateTimeOffset.UtcNow.Add(AllowedTimeSkew))
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_TIME_SKEW", "Integrated time is too far in the future");
|
||||
}
|
||||
}
|
||||
|
||||
return new VerificationResult(
|
||||
true,
|
||||
LogIndex: receipt.Index,
|
||||
Uuid: receipt.Uuid,
|
||||
IntegratedTime: receipt.IntegratedTime.HasValue
|
||||
? DateTimeOffset.FromUnixTimeSeconds(receipt.IntegratedTime.Value)
|
||||
: null);
|
||||
}
|
||||
|
||||
public VerificationResult VerifyWithPayload(RekorReceipt receipt, byte[] payload)
|
||||
{
|
||||
var basicResult = Verify(receipt);
|
||||
if (!basicResult.Success)
|
||||
{
|
||||
return basicResult;
|
||||
}
|
||||
|
||||
// Verify payload hash matches leaf hash
|
||||
var payloadHash = Convert.ToHexString(SHA256.HashData(payload)).ToLower();
|
||||
if (receipt.Proof!.Inclusion!.LeafHash != payloadHash)
|
||||
{
|
||||
return new VerificationResult(false, "RECEIPT_PAYLOAD_MISMATCH",
|
||||
"Payload hash does not match receipt leaf hash");
|
||||
}
|
||||
|
||||
return basicResult;
|
||||
}
|
||||
|
||||
private static bool IsValidUuidFormat(string uuid)
|
||||
{
|
||||
// Rekor UUIDs are 64 hex characters
|
||||
return !string.IsNullOrEmpty(uuid) &&
|
||||
uuid.Length == 64 &&
|
||||
uuid.All(c => char.IsAsciiHexDigitLower(c));
|
||||
}
|
||||
|
||||
private static bool IsValidHashFormat(string? hash)
|
||||
{
|
||||
// SHA-256 hashes are 64 hex characters
|
||||
return !string.IsNullOrEmpty(hash) &&
|
||||
hash.Length == 64 &&
|
||||
hash.All(c => char.IsAsciiHexDigitLower(c));
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
Reference in New Issue
Block a user