Add ProofChain DB perf harness, deterministic seed/query suite, DSSE proof-chain signing, and trust anchor matcher test migration
This commit is contained in:
@@ -0,0 +1,25 @@
|
||||
# StellaOps.Attestor.Persistence — Local Agent Charter
|
||||
|
||||
## Scope
|
||||
- This charter applies to `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/**`.
|
||||
|
||||
## Primary roles
|
||||
- Backend engineer (C# / .NET 10, EF Core, Npgsql).
|
||||
- QA automation engineer (xUnit) for persistence + matcher logic.
|
||||
|
||||
## Required reading (treat as read before edits)
|
||||
- `docs/modules/attestor/architecture.md`
|
||||
- `docs/db/SPECIFICATION.md`
|
||||
- `docs/db/MIGRATION_STRATEGY.md`
|
||||
- PostgreSQL 16 docs (arrays, indexes, JSONB, query plans).
|
||||
|
||||
## Working agreements
|
||||
- Determinism is mandatory where hashes/IDs are produced; all timestamps are UTC.
|
||||
- Offline-friendly defaults: no network calls from library code paths.
|
||||
- Migrations must be idempotent and safe to re-run.
|
||||
- Prefer small, composable services with explicit interfaces (`I*`).
|
||||
|
||||
## Testing expectations
|
||||
- Unit/integration tests live in `src/Attestor/__Tests/StellaOps.Attestor.Persistence.Tests`.
|
||||
- Perf dataset and query harness lives under `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf` and must be deterministic (fixed data, fixed sizes, documented parameters).
|
||||
|
||||
@@ -5,6 +5,9 @@
|
||||
-- Create schema
|
||||
CREATE SCHEMA IF NOT EXISTS proofchain;
|
||||
|
||||
-- Required for gen_random_uuid() defaults
|
||||
CREATE EXTENSION IF NOT EXISTS pgcrypto;
|
||||
|
||||
-- Create verification_result enum type
|
||||
DO $$
|
||||
BEGIN
|
||||
|
||||
@@ -0,0 +1,18 @@
|
||||
# ProofChain DB perf harness
|
||||
|
||||
This folder provides a deterministic, production-like dataset and a small harness to validate index/query performance for the ProofChain schema (`proofchain.*`).
|
||||
|
||||
## Files
|
||||
- `seed.sql` – deterministic dataset generator (uses SQL functions + `generate_series`).
|
||||
- `queries.sql` – representative queries with `EXPLAIN (ANALYZE, BUFFERS)`.
|
||||
- `run-perf.ps1` – starts a local PostgreSQL 16 container, applies migrations, seeds data, runs queries, and captures output.
|
||||
|
||||
## Run
|
||||
From repo root:
|
||||
|
||||
```powershell
|
||||
pwsh -File src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf/run-perf.ps1
|
||||
```
|
||||
|
||||
Output is written to `docs/db/reports/proofchain-schema-perf-2025-12-17.md`.
|
||||
|
||||
@@ -0,0 +1,57 @@
|
||||
-- Representative query set for ProofChain schema perf validation.
-- Run after applying migrations + seeding (`seed.sql`).
-- Lookup keys are derived with the same deterministic helpers
-- (proofchain.hex64 / proofchain.uuid_from_text) used by seed.sql, so every
-- probed row is guaranteed to exist in the seeded dataset.

\timing on

-- Row counts
-- Sanity check: confirm the seed produced the documented dataset sizes
-- before interpreting any EXPLAIN output.
SELECT
    (SELECT count(*) FROM proofchain.trust_anchors) AS trust_anchors,
    (SELECT count(*) FROM proofchain.sbom_entries) AS sbom_entries,
    (SELECT count(*) FROM proofchain.dsse_envelopes) AS dsse_envelopes,
    (SELECT count(*) FROM proofchain.spines) AS spines,
    (SELECT count(*) FROM proofchain.rekor_entries) AS rekor_entries;

-- 1) SBOM entry lookup via unique constraint (bom_digest, purl, version)
-- Expects an index-only/unique-index probe, not a scan.
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, bom_digest, purl, version
FROM proofchain.sbom_entries
WHERE bom_digest = proofchain.hex64('bom:1')
  AND purl = format('pkg:npm/vendor-%02s/pkg-%05s', 1, 1)
  AND version = '1.0.1';

-- 2) Fetch all entries for a given SBOM digest (index on bom_digest)
-- bom:1 covers entries 1..100 in the seed, so LIMIT 100 returns the full set.
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, purl, version
FROM proofchain.sbom_entries
WHERE bom_digest = proofchain.hex64('bom:1')
ORDER BY purl
LIMIT 100;

-- 3) Envelopes for entry + predicate (compound index)
EXPLAIN (ANALYZE, BUFFERS)
SELECT env_id, predicate_type, signer_keyid, body_hash
FROM proofchain.dsse_envelopes
WHERE entry_id = proofchain.uuid_from_text('entry:1')
  AND predicate_type = 'evidence.stella/v1';

-- 4) Spine lookup via bundle_id (unique index)
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, bundle_id, policy_version
FROM proofchain.spines
WHERE bundle_id = proofchain.hex64('bundle:1');

-- 5) Rekor lookup by log index (index)
-- Seed inserts rekor rows for every 10th entry with log_index = i, so 10 exists.
EXPLAIN (ANALYZE, BUFFERS)
SELECT dsse_sha256, uuid, integrated_time
FROM proofchain.rekor_entries
WHERE log_index = 10;

-- 6) Join: entries -> envelopes by bom_digest
-- Validates the index pairing: entries filtered by bom_digest, envelopes
-- joined on entry_id and filtered by predicate_type.
EXPLAIN (ANALYZE, BUFFERS)
SELECT e.entry_id, d.predicate_type, d.body_hash
FROM proofchain.sbom_entries e
JOIN proofchain.dsse_envelopes d ON d.entry_id = e.entry_id
WHERE e.bom_digest = proofchain.hex64('bom:1')
  AND d.predicate_type = 'evidence.stella/v1'
ORDER BY e.purl
LIMIT 100;
|
||||
@@ -0,0 +1,104 @@
|
||||
# Spins up a throwaway PostgreSQL 16 container, applies the ProofChain
# migration, seeds the deterministic perf dataset, runs the EXPLAIN query
# suite, and writes the captured output as a markdown report.
param(
    [string]$PostgresImage = "postgres:16",
    [string]$ContainerName = "stellaops-proofchain-perf",
    [int]$Port = 54329,
    [string]$Database = "proofchain_perf",
    [string]$User = "postgres",
    [string]$Password = "postgres"
)

$ErrorActionPreference = "Stop"

# Walks five directories up from this script's folder (Perf -> repo root).
function Resolve-RepoRoot {
    $here = Split-Path -Parent $PSCommandPath
    return (Resolve-Path (Join-Path $here "../../../../..")).Path
}

$repoRoot = Resolve-RepoRoot
$perfDir = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf"
$migrationFile = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations/20251214000001_AddProofChainSchema.sql"
$seedFile = Join-Path $perfDir "seed.sql"
$queriesFile = Join-Path $perfDir "queries.sql"
$reportFile = Join-Path $repoRoot "docs/db/reports/proofchain-schema-perf-2025-12-17.md"

Write-Host "Using repo root: $repoRoot"
Write-Host "Starting PostgreSQL container '$ContainerName' on localhost:$Port..."

# Best-effort cleanup of any leftover container from a previous run.
# NOTE(review): '*>' already redirects every stream; the trailing '2>&1'
# re-redirects stderr — PowerShell may reject the double redirection. Confirm
# on a clean shell and drop the redundant '2>&1' if so.
try {
    docker rm -f $ContainerName *> $null 2>&1
} catch {}

$null = docker run --rm -d --name $ContainerName `
    -e POSTGRES_PASSWORD=$Password `
    -e POSTGRES_DB=$Database `
    -p ${Port}:5432 `
    $PostgresImage

try {
    # Wait up to 60 seconds for the server to accept connections.
    $ready = $false
    for ($i = 0; $i -lt 60; $i++) {
        docker exec $ContainerName pg_isready -U $User -d $Database *> $null 2>&1
        if ($LASTEXITCODE -eq 0) {
            $ready = $true
            break
        }
        Start-Sleep -Seconds 1
    }

    if (-not $ready) {
        throw "PostgreSQL did not become ready within 60 seconds."
    }

    Write-Host "Applying migrations..."
    $migrationSql = Get-Content -Raw -Encoding UTF8 $migrationFile
    $migrationSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database | Out-Host

    Write-Host "Seeding deterministic dataset..."
    $seedSql = Get-Content -Raw -Encoding UTF8 $seedFile
    $seedSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database | Out-Host

    Write-Host "Running query suite..."
    $queriesSql = Get-Content -Raw -Encoding UTF8 $queriesFile
    # Captured (not piped to Out-Host) so it can be embedded in the report below.
    $queryOutput = $queriesSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database

    $queryOutputText = ($queryOutput -join "`n").TrimEnd()
    # Markdown report body: environment, dataset shape, then raw psql/EXPLAIN output.
    # Row counts below mirror the parameters documented in seed.sql.
    $headerLines = @(
        '# ProofChain schema performance report (2025-12-17)',
        '',
        '## Environment',
        ('- Postgres image: `{0}`' -f $PostgresImage),
        ('- DB: `{0}`' -f $Database),
        ('- Port: `{0}`' -f $Port),
        '- Host: `localhost`',
        '',
        '## Dataset',
        '- Source: `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf/seed.sql`',
        '- Rows:',
        '  - `trust_anchors`: 50',
        '  - `sbom_entries`: 20000',
        '  - `dsse_envelopes`: 60000',
        '  - `spines`: 20000',
        '  - `rekor_entries`: 2000',
        '',
        '## Query Output',
        '',
        '```text',
        $queryOutputText,
        '```',
        ''
    )

    $header = ($headerLines -join "`n")

    # Ensure the report directory exists before writing.
    $dir = Split-Path -Parent $reportFile
    if (!(Test-Path $dir)) {
        New-Item -ItemType Directory -Path $dir -Force | Out-Null
    }

    Set-Content -Path $reportFile -Value $header -Encoding UTF8
    Write-Host "Wrote report: $reportFile"
}
finally {
    # '--rm' removes the container on stop; 'rm -f' also covers abnormal exits.
    Write-Host "Stopping container..."
    docker rm -f $ContainerName *> $null 2>&1
}
|
||||
@@ -0,0 +1,166 @@
|
||||
-- Deterministic ProofChain dataset generator (offline-friendly).
|
||||
-- Designed for index/query perf validation (SPRINT_0501_0006_0001 · PROOF-DB-0011).
|
||||
|
||||
-- Helper: deterministic UUID from text (no extensions required).
-- md5() yields exactly 32 hex characters, and PostgreSQL's uuid input
-- accepts the hyphen-less form, so no manual 8-4-4-4-12 splicing is needed.
CREATE OR REPLACE FUNCTION proofchain.uuid_from_text(input text) RETURNS uuid
LANGUAGE SQL
IMMUTABLE
STRICT
AS $$
    SELECT md5(input)::uuid;
$$;
|
||||
|
||||
-- Helper: deterministic 64-hex string from text.
-- Two md5 digests (32 hex chars each) of salted variants of the input,
-- concatenated to fake a sha256-width digest.
CREATE OR REPLACE FUNCTION proofchain.hex64(input text) RETURNS text
LANGUAGE SQL
IMMUTABLE
STRICT
AS $$
    SELECT concat(md5(input), md5(input || ':2'));
$$;
|
||||
|
||||
-- Parameters
-- Anchors: 50
-- SBOM entries: 20_000 (200 SBOM digests * 100 entries each)
-- Envelopes: 60_000 (3 per entry)
-- Spines: 20_000 (1 per entry)
-- Rekor entries: 2_000 (every 10th entry)
--
-- All ids/digests/timestamps are derived from the series index, so re-running
-- this file is idempotent (every INSERT carries an ON CONFLICT ... DO NOTHING).
-- NOTE(review): PostgreSQL format() documents only the '-' flag; confirm that
-- '%02s' / '%05s' render as intended. queries.sql uses the identical patterns,
-- so lookups still match either way.

-- Trust anchors
-- One anchor per vendor; anchor i owns every purl under pkg:npm/vendor-i/*
-- and allows exactly one key (key-i) plus the fixed predicate-type set.
INSERT INTO proofchain.trust_anchors(
    anchor_id,
    purl_pattern,
    allowed_keyids,
    allowed_predicate_types,
    policy_ref,
    policy_version,
    revoked_keys,
    is_active,
    created_at,
    updated_at
)
SELECT
    proofchain.uuid_from_text('anchor:' || i),
    format('pkg:npm/vendor-%02s/*', i),
    ARRAY[format('key-%02s', i)]::text[],
    ARRAY[
        'evidence.stella/v1',
        'reasoning.stella/v1',
        'cdx-vex.stella/v1',
        'proofspine.stella/v1',
        'verdict.stella/v1',
        'https://stella-ops.org/predicates/sbom-linkage/v1'
    ]::text[],
    format('policy-%02s', i),
    'v2025.12',
    ARRAY[]::text[],
    TRUE,
    TIMESTAMPTZ '2025-12-17T00:00:00Z',
    TIMESTAMPTZ '2025-12-17T00:00:00Z'
FROM generate_series(1, 50) i
ON CONFLICT (anchor_id) DO NOTHING;

-- SBOM entries
-- Entry i belongs to SBOM digest ceil(i/100) (100 entries per digest) and to
-- anchor ((i-1) mod 50)+1; created_at advances one second per entry so
-- time-ordered scans are deterministic.
-- NOTE(review): constraint names (uq_sbom_entry etc.) must match the
-- 20251214000001_AddProofChainSchema.sql migration — confirm.
INSERT INTO proofchain.sbom_entries(
    entry_id,
    bom_digest,
    purl,
    version,
    artifact_digest,
    trust_anchor_id,
    created_at
)
SELECT
    proofchain.uuid_from_text('entry:' || i),
    proofchain.hex64('bom:' || (((i - 1) / 100) + 1)),
    format('pkg:npm/vendor-%02s/pkg-%05s', (((i - 1) % 50) + 1), i),
    format('1.0.%s', (((i - 1) % 50) + 1)),
    proofchain.hex64('artifact:' || i),
    proofchain.uuid_from_text('anchor:' || (((i - 1) % 50) + 1)),
    TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
ON CONFLICT ON CONSTRAINT uq_sbom_entry DO NOTHING;

-- DSSE envelopes (3 per entry)
-- CROSS JOIN against the three predicate types yields 60k rows; signer keyid
-- matches the entry's anchor key so trust-anchor checks succeed.
INSERT INTO proofchain.dsse_envelopes(
    env_id,
    entry_id,
    predicate_type,
    signer_keyid,
    body_hash,
    envelope_blob_ref,
    signed_at,
    created_at
)
SELECT
    proofchain.uuid_from_text('env:' || i || ':' || p.predicate_type),
    proofchain.uuid_from_text('entry:' || i),
    p.predicate_type,
    format('key-%02s', (((i - 1) % 50) + 1)),
    proofchain.hex64('body:' || i || ':' || p.predicate_type),
    format('oci://proofchain/blobs/%s', proofchain.hex64('body:' || i || ':' || p.predicate_type)),
    TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval,
    TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
CROSS JOIN (
    VALUES
        ('evidence.stella/v1'),
        ('reasoning.stella/v1'),
        ('cdx-vex.stella/v1')
) AS p(predicate_type)
ON CONFLICT ON CONSTRAINT uq_dsse_envelope DO NOTHING;

-- Spines (1 per entry)
-- Each spine references three synthetic evidence digests plus the entry's
-- anchor; bundle_id is unique per entry (probed by queries.sql query #4).
INSERT INTO proofchain.spines(
    entry_id,
    bundle_id,
    evidence_ids,
    reasoning_id,
    vex_id,
    anchor_id,
    policy_version,
    created_at
)
SELECT
    proofchain.uuid_from_text('entry:' || i),
    proofchain.hex64('bundle:' || i),
    ARRAY[
        'sha256:' || proofchain.hex64('evidence:' || i || ':1'),
        'sha256:' || proofchain.hex64('evidence:' || i || ':2'),
        'sha256:' || proofchain.hex64('evidence:' || i || ':3')
    ]::text[],
    proofchain.hex64('reasoning:' || i),
    proofchain.hex64('vex:' || i),
    proofchain.uuid_from_text('anchor:' || (((i - 1) % 50) + 1)),
    'v2025.12',
    TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
ON CONFLICT ON CONSTRAINT uq_spine_bundle DO NOTHING;

-- Rekor entries (every 10th entry, points at the evidence envelope)
-- generate_series step 10 gives 2000 rows; log_index = i and
-- integrated_time is a fixed epoch base (1734393600) plus i.
INSERT INTO proofchain.rekor_entries(
    dsse_sha256,
    log_index,
    log_id,
    uuid,
    integrated_time,
    inclusion_proof,
    env_id
)
SELECT
    proofchain.hex64('rekor:' || i),
    i,
    'test-log',
    format('uuid-%s', i),
    1734393600 + i,
    '{"hashes":[],"treeSize":1,"rootHash":"00"}'::jsonb,
    proofchain.uuid_from_text('env:' || i || ':evidence.stella/v1')
FROM generate_series(1, 20000, 10) i
ON CONFLICT (dsse_sha256) DO NOTHING;
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
using StellaOps.Attestor.Persistence.Repositories;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Services;
|
||||
|
||||
@@ -75,7 +76,7 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(purl);
|
||||
|
||||
var anchors = await _repository.GetActiveAnchorsAsync(cancellationToken);
|
||||
var anchors = await _repository.GetActiveTrustAnchorsAsync(cancellationToken);
|
||||
|
||||
TrustAnchorMatchResult? bestMatch = null;
|
||||
|
||||
@@ -284,14 +285,3 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Repository interface extension for trust anchor queries.
/// </summary>
public interface IProofChainRepository
{
    /// <summary>
    /// Gets all active trust anchors.
    /// </summary>
    /// <param name="cancellationToken">Token used to cancel the query.</param>
    /// <returns>The trust anchors currently flagged active.</returns>
    // NOTE(review): a call site in TrustAnchorMatcher references
    // GetActiveTrustAnchorsAsync — confirm the intended method name and align
    // interface and caller before merging.
    Task<IReadOnlyList<TrustAnchorEntity>> GetActiveAnchorsAsync(CancellationToken cancellationToken = default);
}
|
||||
|
||||
@@ -20,4 +20,8 @@
|
||||
</None>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<Compile Remove="Tests\**\*.cs" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -84,10 +84,15 @@ public abstract record ContentAddressedId
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest);
|
||||
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest)
|
||||
{
|
||||
public override string ToString() => base.ToString();
|
||||
}
|
||||
|
||||
public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Digest)
|
||||
{
|
||||
public override string ToString() => base.ToString();
|
||||
|
||||
public new static ArtifactId Parse(string value) => new(ParseSha256(value));
|
||||
public static bool TryParse(string value, out ArtifactId? id) => TryParseSha256(value, out id);
|
||||
|
||||
@@ -122,21 +127,29 @@ public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Di
|
||||
|
||||
public sealed record EvidenceId(string Digest) : ContentAddressedId("sha256", Digest)
{
    // Use the base formatting instead of the record-synthesized
    // "EvidenceId { Digest = ... }" representation.
    public override string ToString() => base.ToString();

    /// <summary>Parses an "EvidenceID"-labelled sha256 digest string into a typed id (validation delegated to Sha256IdParser).</summary>
    public new static EvidenceId Parse(string value) => new(Sha256IdParser.Parse(value, "EvidenceID"));
}
|
||||
|
||||
public sealed record ReasoningId(string Digest) : ContentAddressedId("sha256", Digest)
{
    // Use the base formatting instead of the record-synthesized representation.
    public override string ToString() => base.ToString();

    /// <summary>Parses a "ReasoningID"-labelled sha256 digest string into a typed id (validation delegated to Sha256IdParser).</summary>
    public new static ReasoningId Parse(string value) => new(Sha256IdParser.Parse(value, "ReasoningID"));
}
|
||||
|
||||
public sealed record VexVerdictId(string Digest) : ContentAddressedId("sha256", Digest)
{
    // Use the base formatting instead of the record-synthesized representation.
    public override string ToString() => base.ToString();

    /// <summary>Parses a "VEXVerdictID"-labelled sha256 digest string into a typed id (validation delegated to Sha256IdParser).</summary>
    public new static VexVerdictId Parse(string value) => new(Sha256IdParser.Parse(value, "VEXVerdictID"));
}
|
||||
|
||||
public sealed record ProofBundleId(string Digest) : ContentAddressedId("sha256", Digest)
{
    // Use the base formatting instead of the record-synthesized representation.
    public override string ToString() => base.ToString();

    /// <summary>Parses a "ProofBundleID"-labelled sha256 digest string into a typed id (validation delegated to Sha256IdParser).</summary>
    public new static ProofBundleId Parse(string value) => new(Sha256IdParser.Parse(value, "ProofBundleID"));
}
|
||||
|
||||
|
||||
@@ -0,0 +1,42 @@
|
||||
using System;
|
||||
using System.Globalization;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Signing;
|
||||
|
||||
/// <summary>
/// Computes the DSSE Pre-Authentication Encoding (PAE):
/// UTF8("DSSEv1") + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload,
/// where each LEN is the UTF-8 byte length rendered as invariant decimal text.
/// </summary>
internal static class DssePreAuthenticationEncoding
{
    public static byte[] Compute(string payloadType, ReadOnlySpan<byte> payload)
    {
        // A null payload type is treated as the empty string (length 0).
        var type = payloadType ?? string.Empty;
        var typeByteCount = Encoding.UTF8.GetByteCount(type);

        // Everything ahead of the raw payload bytes is plain text, so build
        // the whole prefix as one string and encode it in a single pass.
        var prefix = string.Create(
            CultureInfo.InvariantCulture,
            $"DSSEv1 {typeByteCount} {type} {payload.Length} ");
        var prefixBytes = Encoding.UTF8.GetBytes(prefix);

        var buffer = new byte[prefixBytes.Length + payload.Length];
        prefixBytes.CopyTo(buffer, 0);
        payload.CopyTo(buffer.AsSpan(prefixBytes.Length));
        return buffer;
    }
}
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
using StellaOps.Attestor.Envelope;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Signing;
|
||||
|
||||
/// <summary>
/// Provides key material for signing and verifying proof chain DSSE envelopes.
/// </summary>
public interface IProofChainKeyStore
{
    /// <summary>
    /// Resolve the signing key for a given key profile.
    /// </summary>
    /// <param name="profile">The signing key profile to resolve.</param>
    /// <param name="key">The resolved key when the method returns true.</param>
    /// <returns>true when a signing key is configured for <paramref name="profile"/>; otherwise false.</returns>
    bool TryGetSigningKey(SigningKeyProfile profile, out EnvelopeKey key);

    /// <summary>
    /// Resolve a verification key by key identifier.
    /// </summary>
    /// <param name="keyId">The DSSE signature keyid to look up.</param>
    /// <param name="key">The resolved key when the method returns true.</param>
    /// <returns>true when a verification key is known for <paramref name="keyId"/>; otherwise false.</returns>
    bool TryGetVerificationKey(string keyId, out EnvelopeKey key);
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Signing;
|
||||
@@ -55,16 +56,19 @@ public sealed record DsseEnvelope
|
||||
/// <summary>
|
||||
/// The payload type (always "application/vnd.in-toto+json").
|
||||
/// </summary>
|
||||
[JsonPropertyName("payloadType")]
|
||||
public required string PayloadType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded payload (the statement JSON).
|
||||
/// </summary>
|
||||
[JsonPropertyName("payload")]
|
||||
public required string Payload { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signatures over the payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signatures")]
|
||||
public required IReadOnlyList<DsseSignature> Signatures { get; init; }
|
||||
}
|
||||
|
||||
@@ -76,11 +80,13 @@ public sealed record DsseSignature
|
||||
/// <summary>
|
||||
/// The key ID that produced this signature.
|
||||
/// </summary>
|
||||
[JsonPropertyName("keyid")]
|
||||
public required string KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded signature.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sig")]
|
||||
public required string Sig { get; init; }
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,196 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.Envelope;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Signing;
|
||||
|
||||
/// <summary>
/// Default implementation for creating and verifying DSSE envelopes for proof chain statements.
/// Signing: serialize the statement, canonicalize the JSON, wrap it in the DSSE
/// pre-authentication encoding (PAE), and sign with the key resolved from the key store.
/// Verification: recompute the PAE from the envelope and accept the first signature
/// that verifies against a known (and, when restricted, allowed) key.
/// </summary>
public sealed class ProofChainSigner : IProofChainSigner
{
    /// <summary>DSSE payload type used for all in-toto statements.</summary>
    public const string InTotoPayloadType = "application/vnd.in-toto+json";

    // Serializer settings for statements prior to canonicalization:
    // property names kept as declared, nulls dropped, no indentation.
    private static readonly JsonSerializerOptions StatementSerializerOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = null,
        WriteIndented = false
    };

    private readonly IProofChainKeyStore _keyStore;              // resolves signing/verification keys
    private readonly IJsonCanonicalizer _canonicalizer;          // deterministic JSON form for signing
    private readonly EnvelopeSignatureService _signatureService; // performs the raw sign/verify

    /// <summary>
    /// Creates a signer.
    /// </summary>
    /// <param name="keyStore">Key material source. Required.</param>
    /// <param name="canonicalizer">Canonicalizes statement JSON before signing. Required.</param>
    /// <param name="signatureService">Optional; a default instance is created when null.</param>
    public ProofChainSigner(
        IProofChainKeyStore keyStore,
        IJsonCanonicalizer canonicalizer,
        EnvelopeSignatureService? signatureService = null)
    {
        _keyStore = keyStore ?? throw new ArgumentNullException(nameof(keyStore));
        _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
        _signatureService = signatureService ?? new EnvelopeSignatureService();
    }

    /// <summary>
    /// Signs an in-toto statement and returns a single-signature DSSE envelope.
    /// </summary>
    /// <param name="statement">The statement to sign. Required.</param>
    /// <param name="keyProfile">Profile used to resolve the signing key.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <exception cref="InvalidOperationException">No key configured for the profile, or signing failed.</exception>
    public Task<DsseEnvelope> SignStatementAsync<T>(
        T statement,
        SigningKeyProfile keyProfile,
        CancellationToken ct = default) where T : InTotoStatement
    {
        ArgumentNullException.ThrowIfNull(statement);
        ct.ThrowIfCancellationRequested();

        if (!_keyStore.TryGetSigningKey(keyProfile, out var key))
        {
            throw new InvalidOperationException($"No signing key configured for profile '{keyProfile}'.");
        }

        // Serialize with the runtime type (statement.GetType()) so properties of
        // derived statement types are included, then canonicalize so the signed
        // bytes are deterministic.
        var statementJson = JsonSerializer.SerializeToUtf8Bytes(statement, statement.GetType(), StatementSerializerOptions);
        var canonicalPayload = _canonicalizer.Canonicalize(statementJson);

        // DSSE signs the pre-authentication encoding, not the raw payload.
        var pae = DssePreAuthenticationEncoding.Compute(InTotoPayloadType, canonicalPayload);
        var signatureResult = _signatureService.Sign(pae, key, ct);
        if (!signatureResult.IsSuccess)
        {
            throw new InvalidOperationException($"DSSE signing failed: {signatureResult.Error.Code} {signatureResult.Error.Message}");
        }

        var signature = signatureResult.Value;
        return Task.FromResult(new DsseEnvelope
        {
            PayloadType = InTotoPayloadType,
            Payload = Convert.ToBase64String(canonicalPayload),
            Signatures =
            [
                new DsseSignature
                {
                    KeyId = signature.KeyId,
                    Sig = Convert.ToBase64String(signature.Value.Span)
                }
            ]
        });
    }

    /// <summary>
    /// Verifies a DSSE envelope against the key store.
    /// An empty <paramref name="allowedKeyIds"/> list means any known key is acceptable;
    /// otherwise only signatures whose keyid is in the list are considered.
    /// Returns a result describing the first signature that verified, or the last
    /// failure reason when none did. Never throws for malformed input.
    /// </summary>
    public Task<SignatureVerificationResult> VerifyEnvelopeAsync(
        DsseEnvelope envelope,
        IReadOnlyList<string> allowedKeyIds,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(envelope);
        ArgumentNullException.ThrowIfNull(allowedKeyIds);
        ct.ThrowIfCancellationRequested();

        if (envelope.Signatures is null || envelope.Signatures.Count == 0)
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = "Envelope contains no signatures."
            });
        }

        if (string.IsNullOrWhiteSpace(envelope.Payload))
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = "Envelope payload is missing."
            });
        }

        byte[] payloadBytes;
        try
        {
            payloadBytes = Convert.FromBase64String(envelope.Payload);
        }
        catch (FormatException ex)
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = $"Envelope payload is not valid base64: {ex.Message}"
            });
        }

        // Recompute the PAE using the envelope's own payload type — the same
        // bytes the signer must have signed.
        var pae = DssePreAuthenticationEncoding.Compute(envelope.PayloadType, payloadBytes);
        var allowAnyKey = allowedKeyIds.Count == 0;
        var allowedSet = allowAnyKey ? null : new HashSet<string>(allowedKeyIds, StringComparer.Ordinal);

        string? lastError = null;
        // Ordinal ordering by keyid makes the iteration (and thus which error
        // message survives as lastError) deterministic.
        foreach (var signature in envelope.Signatures.OrderBy(static s => s.KeyId, StringComparer.Ordinal))
        {
            if (signature is null)
            {
                continue;
            }

            if (!allowAnyKey && !allowedSet!.Contains(signature.KeyId))
            {
                continue;
            }

            if (!_keyStore.TryGetVerificationKey(signature.KeyId, out var verificationKey))
            {
                lastError = $"No verification key available for keyid '{signature.KeyId}'.";
                continue;
            }

            byte[] signatureBytes;
            try
            {
                // NOTE(review): a null Sig would throw ArgumentNullException here
                // (FromBase64String(null)), which this catch does not cover — confirm
                // whether null signatures can reach this point.
                signatureBytes = Convert.FromBase64String(signature.Sig);
            }
            catch (FormatException ex)
            {
                lastError = $"Signature for keyid '{signature.KeyId}' is not valid base64: {ex.Message}";
                continue;
            }

            var envelopeSignature = new EnvelopeSignature(signature.KeyId, verificationKey.AlgorithmId, signatureBytes);
            var verificationResult = _signatureService.Verify(pae, envelopeSignature, verificationKey, ct);

            // First successful verification wins.
            if (verificationResult.IsSuccess)
            {
                return Task.FromResult(new SignatureVerificationResult
                {
                    IsValid = true,
                    KeyId = signature.KeyId
                });
            }

            lastError = verificationResult.Error.Message;
        }

        // Distinguish "no signature used an allowed key" from "an allowed
        // signature was tried but failed" for a clearer error message.
        if (!allowAnyKey)
        {
            var hasAllowed = envelope.Signatures.Any(s => allowedSet!.Contains(s.KeyId));
            if (!hasAllowed)
            {
                return Task.FromResult(new SignatureVerificationResult
                {
                    IsValid = false,
                    KeyId = string.Empty,
                    ErrorMessage = "No signatures match the allowed key IDs."
                });
            }
        }

        return Task.FromResult(new SignatureVerificationResult
        {
            IsValid = false,
            KeyId = string.Empty,
            ErrorMessage = lastError ?? "No valid signature found."
        });
    }
}
|
||||
|
||||
@@ -8,4 +8,12 @@
|
||||
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -133,21 +133,26 @@ public sealed class VerificationPipeline : IVerificationPipeline
|
||||
var pipelineDuration = _timeProvider.GetUtcNow() - pipelineStartTime;
|
||||
|
||||
// Generate receipt
|
||||
var anchorId = context.TrustAnchorId ?? request.TrustAnchorId ?? new TrustAnchorId(Guid.Empty);
|
||||
var checks = stepResults.Select(step => new VerificationCheck
|
||||
{
|
||||
Check = step.StepName,
|
||||
Status = step.Passed ? VerificationResult.Pass : VerificationResult.Fail,
|
||||
KeyId = step.KeyId,
|
||||
Expected = step.Expected,
|
||||
Actual = step.Actual,
|
||||
LogIndex = step.LogIndex,
|
||||
Details = step.Passed ? step.Details : step.ErrorMessage
|
||||
}).ToList();
|
||||
|
||||
var receipt = new VerificationReceipt
|
||||
{
|
||||
ReceiptId = GenerateReceiptId(),
|
||||
Result = overallPassed ? VerificationResult.Pass : VerificationResult.Fail,
|
||||
ProofBundleId = request.ProofBundleId,
|
||||
VerifiedAt = pipelineStartTime,
|
||||
VerifierVersion = request.VerifierVersion,
|
||||
ProofBundleId = request.ProofBundleId.Value,
|
||||
FailureReason = failureReason,
|
||||
StepsSummary = stepResults.Select(s => new VerificationStepSummary
|
||||
{
|
||||
StepName = s.StepName,
|
||||
Passed = s.Passed,
|
||||
DurationMs = (int)s.Duration.TotalMilliseconds
|
||||
}).ToList(),
|
||||
TotalDurationMs = (int)pipelineDuration.TotalMilliseconds
|
||||
AnchorId = anchorId,
|
||||
Result = overallPassed ? VerificationResult.Pass : VerificationResult.Fail,
|
||||
Checks = checks
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
@@ -170,12 +175,6 @@ public sealed class VerificationPipeline : IVerificationPipeline
|
||||
ErrorMessage = "Verification cancelled"
|
||||
};
|
||||
|
||||
// Produces "receipt:<32 lowercase hex chars>" from 128 bits of CSPRNG output.
private static string GenerateReceiptId()
{
    var randomBytes = RandomNumberGenerator.GetBytes(16);
    return string.Concat("receipt:", Convert.ToHexString(randomBytes).ToLowerInvariant());
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
@@ -296,7 +295,7 @@ public sealed class IdRecomputationVerificationStep : IVerificationStep
|
||||
var recomputedId = ComputeProofBundleId(bundle);
|
||||
|
||||
// Compare with claimed ID
|
||||
var claimedId = context.ProofBundleId.Value;
|
||||
var claimedId = context.ProofBundleId.ToString();
|
||||
if (!recomputedId.Equals(claimedId, StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return new VerificationStepResult
|
||||
@@ -516,9 +515,19 @@ public sealed class TrustAnchorVerificationStep : IVerificationStep
|
||||
}
|
||||
|
||||
// Resolve trust anchor
|
||||
var anchor = context.TrustAnchorId is not null
|
||||
? await _trustAnchorResolver.GetAnchorAsync(context.TrustAnchorId.Value, ct)
|
||||
: await _trustAnchorResolver.FindAnchorForProofAsync(context.ProofBundleId, ct);
|
||||
TrustAnchorInfo? anchor;
|
||||
if (context.TrustAnchorId is TrustAnchorId anchorId)
|
||||
{
|
||||
anchor = await _trustAnchorResolver.GetAnchorAsync(anchorId.Value, ct);
|
||||
}
|
||||
else
|
||||
{
|
||||
anchor = await _trustAnchorResolver.FindAnchorForProofAsync(context.ProofBundleId, ct);
|
||||
if (anchor is not null)
|
||||
{
|
||||
context.TrustAnchorId = new TrustAnchorId(anchor.AnchorId);
|
||||
}
|
||||
}
|
||||
|
||||
if (anchor is null)
|
||||
{
|
||||
|
||||
@@ -0,0 +1,32 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <!-- xUnit test project for StellaOps.Attestor.Persistence. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <!-- Presumably opts out of shared Concelier test-infrastructure imports — confirm against repo-level props. -->
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>

  <!-- Test runner, assertion, and mocking stack. -->
  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="NSubstitute" Version="5.1.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
  </ItemGroup>

  <!-- Implicit "using Xunit;" for every test file. -->
  <ItemGroup>
    <Using Include="Xunit" />
  </ItemGroup>

  <!-- System under test. -->
  <ItemGroup>
    <ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.Persistence\StellaOps.Attestor.Persistence.csproj" />
  </ItemGroup>

</Project>
|
||||
@@ -1,184 +1,143 @@
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
using StellaOps.Attestor.Persistence.Services;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
using NSubstitute;
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
using StellaOps.Attestor.Persistence.Repositories;
|
||||
using StellaOps.Attestor.Persistence.Services;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for proof chain database operations.
|
||||
/// SPRINT_0501_0006_0001 - Task #10
|
||||
/// Tests for trust anchor glob matching and allowlists.
|
||||
/// Sprint: SPRINT_0501_0006_0001_proof_chain_database_schema
|
||||
/// Task: PROOF-DB-0010
|
||||
/// </summary>
|
||||
public sealed class ProofChainRepositoryIntegrationTests
|
||||
public sealed class TrustAnchorMatcherTests
|
||||
{
|
||||
private readonly Mock<IProofChainRepository> _repositoryMock;
|
||||
private readonly IProofChainRepository _repository;
|
||||
private readonly TrustAnchorMatcher _matcher;
|
||||
|
||||
public ProofChainRepositoryIntegrationTests()
|
||||
public TrustAnchorMatcherTests()
|
||||
{
|
||||
_repositoryMock = new Mock<IProofChainRepository>();
|
||||
_matcher = new TrustAnchorMatcher(
|
||||
_repositoryMock.Object,
|
||||
NullLogger<TrustAnchorMatcher>.Instance);
|
||||
_repository = Substitute.For<IProofChainRepository>();
|
||||
_matcher = new TrustAnchorMatcher(_repository, NullLogger<TrustAnchorMatcher>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_ExactPattern_MatchesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/lodash@4.17.21", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal(anchor.AnchorId, result.Anchor.AnchorId);
|
||||
result.Should().NotBeNull();
|
||||
result!.Anchor.AnchorId.Should().Be(anchor.AnchorId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_WildcardPattern_MatchesPackages()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("pkg:npm/*", result.MatchedPattern);
|
||||
result.Should().NotBeNull();
|
||||
result!.MatchedPattern.Should().Be("pkg:npm/*");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_DoubleWildcard_MatchesNestedPaths()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/@scope/**", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/@scope/sub/package@1.0.0");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
result.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_MultipleMatches_ReturnsMoreSpecific()
|
||||
{
|
||||
// Arrange
|
||||
var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], "generic");
|
||||
var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], "specific");
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([genericAnchor, specificAnchor]);
|
||||
var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], policyRef: "generic");
|
||||
var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], policyRef: "specific");
|
||||
await SeedAnchors(genericAnchor, specificAnchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("specific", result.Anchor.PolicyRef);
|
||||
result.Should().NotBeNull();
|
||||
result!.Anchor.PolicyRef.Should().Be("specific");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_NoMatch_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:pypi/requests@2.28.0");
|
||||
|
||||
// Assert
|
||||
Assert.Null(result);
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_AllowedKey_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1", "key-2"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
|
||||
|
||||
// Assert
|
||||
Assert.True(allowed);
|
||||
allowed.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_DisallowedKey_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-unknown");
|
||||
|
||||
// Assert
|
||||
Assert.False(allowed);
|
||||
allowed.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_RevokedKey_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"], revokedKeys: ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
|
||||
|
||||
// Assert
|
||||
Assert.False(allowed); // Key is revoked even if in allowed list
|
||||
allowed.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsPredicateAllowedAsync_NoRestrictions_AllowsAll()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
anchor.AllowedPredicateTypes = null;
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21",
|
||||
"https://in-toto.io/attestation/vulns/v0.1");
|
||||
|
||||
// Assert
|
||||
Assert.True(allowed);
|
||||
allowed.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsPredicateAllowedAsync_WithRestrictions_EnforcesAllowlist()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
anchor.AllowedPredicateTypes = ["evidence.stella/v1", "sbom.stella/v1"];
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act & Assert
|
||||
Assert.True(await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21", "evidence.stella/v1"));
|
||||
Assert.False(await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21", "random.predicate/v1"));
|
||||
(await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "evidence.stella/v1")).Should().BeTrue();
|
||||
(await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "random.predicate/v1")).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Theory]
|
||||
@@ -190,19 +149,21 @@ public sealed class ProofChainRepositoryIntegrationTests
|
||||
[InlineData("pkg:pypi/*", "pkg:npm/lodash@4.17.21", false)]
|
||||
[InlineData("pkg:npm/@scope/*", "pkg:npm/@scope/package@1.0.0", true)]
|
||||
[InlineData("pkg:npm/@scope/*", "pkg:npm/@other/package@1.0.0", false)]
|
||||
public async Task FindMatchAsync_PatternVariations_MatchCorrectly(
|
||||
string pattern, string purl, bool shouldMatch)
|
||||
public async Task FindMatchAsync_PatternVariations_MatchCorrectly(string pattern, string purl, bool shouldMatch)
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor(pattern, ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
await SeedAnchors(anchor);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync(purl);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(shouldMatch, result != null);
|
||||
(result != null).Should().Be(shouldMatch);
|
||||
}
|
||||
|
||||
private Task SeedAnchors(params TrustAnchorEntity[] anchors)
|
||||
{
|
||||
_repository.GetActiveTrustAnchorsAsync(Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult<IReadOnlyList<TrustAnchorEntity>>(anchors));
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
private static TrustAnchorEntity CreateAnchor(
|
||||
@@ -217,7 +178,8 @@ public sealed class ProofChainRepositoryIntegrationTests
|
||||
PurlPattern = pattern,
|
||||
AllowedKeyIds = allowedKeys,
|
||||
PolicyRef = policyRef,
|
||||
RevokedKeys = revokedKeys ?? [],
|
||||
RevokedKeys = revokedKeys ?? []
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,631 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Copyright (c) 2025 StellaOps Contributors
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using NSubstitute;
|
||||
using StellaOps.Attestor.ProofChain;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
using StellaOps.Attestor.ProofChain.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Load tests for proof chain API endpoints and verification pipeline.
|
||||
/// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
|
||||
/// Task: PROOF-API-0012
|
||||
/// </summary>
|
||||
public class ApiLoadTests
|
||||
{
|
||||
private readonly ILogger<VerificationPipeline> _logger = NullLogger<VerificationPipeline>.Instance;
|
||||
|
||||
#region Proof Spine Creation Load Tests
|
||||
|
||||
[Fact]
|
||||
public async Task CreateProofSpine_ConcurrentRequests_MaintainsThroughput()
|
||||
{
|
||||
// Arrange: Create synthetic SBOM entries for load testing
|
||||
const int concurrencyLevel = 50;
|
||||
const int operationsPerClient = 20;
|
||||
var totalOperations = concurrencyLevel * operationsPerClient;
|
||||
|
||||
var proofSpineBuilder = CreateTestProofSpineBuilder();
|
||||
var latencies = new ConcurrentBag<long>();
|
||||
var errors = new ConcurrentBag<Exception>();
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
// Act: Run concurrent proof spine creations
|
||||
var tasks = Enumerable.Range(0, concurrencyLevel)
|
||||
.Select(clientId => Task.Run(async () =>
|
||||
{
|
||||
for (var i = 0; i < operationsPerClient; i++)
|
||||
{
|
||||
try
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var entryId = GenerateSyntheticEntryId(clientId, i);
|
||||
var spine = await proofSpineBuilder.BuildAsync(
|
||||
entryId,
|
||||
GenerateSyntheticEvidenceIds(3),
|
||||
$"sha256:{GenerateHash("reasoning")}",
|
||||
$"sha256:{GenerateHash("vex")}",
|
||||
"v2.3.1",
|
||||
CancellationToken.None);
|
||||
sw.Stop();
|
||||
latencies.Add(sw.ElapsedMilliseconds);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
errors.Add(ex);
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
stopwatch.Stop();
|
||||
|
||||
// Assert: Verify load test metrics
|
||||
var successCount = latencies.Count;
|
||||
var errorCount = errors.Count;
|
||||
var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
|
||||
var avgLatency = latencies.Any() ? latencies.Average() : 0;
|
||||
var p95Latency = CalculatePercentile(latencies, 95);
|
||||
var p99Latency = CalculatePercentile(latencies, 99);
|
||||
|
||||
// Performance assertions
|
||||
successCount.Should().Be(totalOperations, "all operations should complete successfully");
|
||||
errorCount.Should().Be(0, "no errors should occur during load test");
|
||||
throughput.Should().BeGreaterThan(100, "throughput should exceed 100 ops/sec");
|
||||
avgLatency.Should().BeLessThan(50, "average latency should be under 50ms");
|
||||
p99Latency.Should().BeLessThan(200, "p99 latency should be under 200ms");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerificationPipeline_ConcurrentVerifications_MaintainsAccuracy()
|
||||
{
|
||||
// Arrange
|
||||
const int concurrencyLevel = 30;
|
||||
const int verificationsPerClient = 10;
|
||||
var totalVerifications = concurrencyLevel * verificationsPerClient;
|
||||
|
||||
var mockDsseVerifier = CreateMockDsseVerifier();
|
||||
var mockIdRecomputer = CreateMockIdRecomputer();
|
||||
var mockRekorVerifier = CreateMockRekorVerifier();
|
||||
var pipeline = new VerificationPipeline(
|
||||
mockDsseVerifier,
|
||||
mockIdRecomputer,
|
||||
mockRekorVerifier,
|
||||
_logger);
|
||||
|
||||
var results = new ConcurrentBag<VerificationResult>();
|
||||
var latencies = new ConcurrentBag<long>();
|
||||
|
||||
// Act: Run concurrent verifications
|
||||
var tasks = Enumerable.Range(0, concurrencyLevel)
|
||||
.Select(clientId => Task.Run(async () =>
|
||||
{
|
||||
for (var i = 0; i < verificationsPerClient; i++)
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
var proof = GenerateSyntheticProof(clientId, i);
|
||||
var result = await pipeline.VerifyAsync(proof, CancellationToken.None);
|
||||
sw.Stop();
|
||||
latencies.Add(sw.ElapsedMilliseconds);
|
||||
results.Add(result);
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
|
||||
// Assert: All verifications should be deterministic
|
||||
results.Count.Should().Be(totalVerifications);
|
||||
results.All(r => r.IsValid).Should().BeTrue("all synthetic proofs should verify successfully");
|
||||
|
||||
var avgLatency = latencies.Average();
|
||||
avgLatency.Should().BeLessThan(30, "verification should be fast");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Deterministic Ordering Tests Under Load
|
||||
|
||||
[Fact]
|
||||
public void ProofSpineOrdering_UnderConcurrency_RemainsDeterministic()
|
||||
{
|
||||
// Arrange: Same inputs should produce same outputs under concurrent access
|
||||
const int iterations = 100;
|
||||
var seed = 42;
|
||||
var random = new Random(seed);
|
||||
|
||||
var evidenceIds = Enumerable.Range(0, 5)
|
||||
.Select(i => $"sha256:{GenerateHash($"evidence{i}")}")
|
||||
.ToArray();
|
||||
|
||||
var results = new ConcurrentBag<string>();
|
||||
|
||||
// Act: Compute proof spine hash concurrently multiple times
|
||||
Parallel.For(0, iterations, _ =>
|
||||
{
|
||||
var sorted = evidenceIds.OrderBy(x => x).ToArray();
|
||||
var combined = string.Join(":", sorted);
|
||||
var hash = GenerateHash(combined);
|
||||
results.Add(hash);
|
||||
});
|
||||
|
||||
// Assert: All results should be identical (deterministic)
|
||||
results.Distinct().Count().Should().Be(1, "concurrent computations should be deterministic");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MerkleTree_ConcurrentBuilding_ProducesSameRoot()
|
||||
{
|
||||
// Arrange
|
||||
const int leafCount = 1000;
|
||||
const int iterations = 20;
|
||||
|
||||
var leaves = Enumerable.Range(0, leafCount)
|
||||
.Select(i => Encoding.UTF8.GetBytes($"leaf-{i:D5}"))
|
||||
.ToList();
|
||||
|
||||
var roots = new ConcurrentBag<string>();
|
||||
|
||||
// Act: Build Merkle tree concurrently
|
||||
await Parallel.ForEachAsync(Enumerable.Range(0, iterations), async (_, ct) =>
|
||||
{
|
||||
var builder = new MerkleTreeBuilder();
|
||||
foreach (var leaf in leaves)
|
||||
{
|
||||
builder.AddLeaf(leaf);
|
||||
}
|
||||
var root = builder.ComputeRoot();
|
||||
roots.Add(Convert.ToHexString(root));
|
||||
});
|
||||
|
||||
// Assert: All roots should be identical
|
||||
roots.Distinct().Count().Should().Be(1, "Merkle tree root should be deterministic");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Throughput Benchmarks
|
||||
|
||||
[Theory]
|
||||
[InlineData(10, 100)] // Light load
|
||||
[InlineData(50, 50)] // Medium load
|
||||
[InlineData(100, 20)] // Heavy load
|
||||
public async Task ThroughputBenchmark_VariousLoadProfiles(int concurrency, int opsPerClient)
|
||||
{
|
||||
// Arrange
|
||||
var totalOps = concurrency * opsPerClient;
|
||||
var successCount = 0;
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
// Act: Simulate API calls
|
||||
var tasks = Enumerable.Range(0, concurrency)
|
||||
.Select(_ => Task.Run(() =>
|
||||
{
|
||||
for (var i = 0; i < opsPerClient; i++)
|
||||
{
|
||||
// Simulate proof creation work
|
||||
var hash = GenerateHash($"proof-{Guid.NewGuid()}");
|
||||
Interlocked.Increment(ref successCount);
|
||||
}
|
||||
}));
|
||||
|
||||
await Task.WhenAll(tasks);
|
||||
stopwatch.Stop();
|
||||
|
||||
// Assert
|
||||
var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
|
||||
successCount.Should().Be(totalOps);
|
||||
throughput.Should().BeGreaterThan(1000, $"throughput at {concurrency} concurrency should exceed 1000 ops/sec");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LatencyDistribution_UnderLoad_MeetsSloBudgets()
|
||||
{
|
||||
// Arrange: Define SLO budgets
|
||||
const double maxP50Ms = 10;
|
||||
const double maxP90Ms = 25;
|
||||
const double maxP99Ms = 100;
|
||||
const int sampleSize = 1000;
|
||||
|
||||
var latencies = new ConcurrentBag<double>();
|
||||
|
||||
// Act: Collect latency samples
|
||||
await Parallel.ForEachAsync(Enumerable.Range(0, sampleSize), async (i, ct) =>
|
||||
{
|
||||
var sw = Stopwatch.StartNew();
|
||||
// Simulate verification work
|
||||
var hash = GenerateHash($"sample-{i}");
|
||||
await Task.Delay(1, ct); // Simulate I/O
|
||||
sw.Stop();
|
||||
latencies.Add(sw.Elapsed.TotalMilliseconds);
|
||||
});
|
||||
|
||||
// Calculate percentiles
|
||||
var sorted = latencies.OrderBy(x => x).ToList();
|
||||
var p50 = CalculatePercentileFromSorted(sorted, 50);
|
||||
var p90 = CalculatePercentileFromSorted(sorted, 90);
|
||||
var p99 = CalculatePercentileFromSorted(sorted, 99);
|
||||
|
||||
// Assert: SLO compliance
|
||||
p50.Should().BeLessThan(maxP50Ms, "p50 latency should meet SLO");
|
||||
p90.Should().BeLessThan(maxP90Ms, "p90 latency should meet SLO");
|
||||
p99.Should().BeLessThan(maxP99Ms, "p99 latency should meet SLO");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Memory and Resource Tests
|
||||
|
||||
[Fact]
|
||||
public void LargeProofBatch_DoesNotCauseMemorySpike()
|
||||
{
|
||||
// Arrange
|
||||
const int batchSize = 10_000;
|
||||
var initialMemory = GC.GetTotalMemory(true);
|
||||
|
||||
// Act: Create large batch of proofs
|
||||
var proofs = new List<string>(batchSize);
|
||||
for (var i = 0; i < batchSize; i++)
|
||||
{
|
||||
var proof = GenerateSyntheticProofJson(i);
|
||||
proofs.Add(proof);
|
||||
}
|
||||
|
||||
// Force GC and measure
|
||||
var peakMemory = GC.GetTotalMemory(false);
|
||||
proofs.Clear();
|
||||
GC.Collect();
|
||||
var finalMemory = GC.GetTotalMemory(true);
|
||||
|
||||
// Assert: Memory should not grow unbounded
|
||||
var memoryGrowth = peakMemory - initialMemory;
|
||||
var memoryRetained = finalMemory - initialMemory;
|
||||
|
||||
// Each proof is ~500 bytes, so 10k proofs ≈ 5MB is reasonable
|
||||
memoryGrowth.Should().BeLessThan(50_000_000, "memory growth should be bounded (~50MB max for 10k proofs)");
|
||||
memoryRetained.Should().BeLessThan(10_000_000, "memory should be released after clearing");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static IProofSpineBuilder CreateTestProofSpineBuilder()
|
||||
{
|
||||
// Create a mock proof spine builder for load testing
|
||||
var builder = Substitute.For<IProofSpineBuilder>();
|
||||
builder.BuildAsync(
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string[]>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<CancellationToken>())
|
||||
.Returns(callInfo =>
|
||||
{
|
||||
var entryId = callInfo.ArgAt<string>(0);
|
||||
return Task.FromResult(new ProofSpine
|
||||
{
|
||||
EntryId = entryId,
|
||||
SpineId = $"sha256:{GenerateHash(entryId)}",
|
||||
PolicyVersion = callInfo.ArgAt<string>(4),
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
});
|
||||
});
|
||||
return builder;
|
||||
}
|
||||
|
||||
private static IDsseVerifier CreateMockDsseVerifier()
|
||||
{
|
||||
var verifier = Substitute.For<IDsseVerifier>();
|
||||
verifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new DsseVerificationResult { IsValid = true }));
|
||||
return verifier;
|
||||
}
|
||||
|
||||
private static IIdRecomputer CreateMockIdRecomputer()
|
||||
{
|
||||
var recomputer = Substitute.For<IIdRecomputer>();
|
||||
recomputer.VerifyAsync(Arg.Any<ProofBundle>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new IdVerificationResult { IsValid = true }));
|
||||
return recomputer;
|
||||
}
|
||||
|
||||
private static IRekorVerifier CreateMockRekorVerifier()
|
||||
{
|
||||
var verifier = Substitute.For<IRekorVerifier>();
|
||||
verifier.VerifyInclusionAsync(Arg.Any<RekorEntry>(), Arg.Any<CancellationToken>())
|
||||
.Returns(Task.FromResult(new RekorVerificationResult { IsValid = true }));
|
||||
return verifier;
|
||||
}
|
||||
|
||||
private static string GenerateSyntheticEntryId(int clientId, int index)
|
||||
{
|
||||
var hash = GenerateHash($"entry-{clientId}-{index}");
|
||||
return $"sha256:{hash}:pkg:npm/example@1.0.{index}";
|
||||
}
|
||||
|
||||
private static string[] GenerateSyntheticEvidenceIds(int count)
|
||||
{
|
||||
return Enumerable.Range(0, count)
|
||||
.Select(i => $"sha256:{GenerateHash($"evidence-{i}")}")
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static ProofBundle GenerateSyntheticProof(int clientId, int index)
|
||||
{
|
||||
return new ProofBundle
|
||||
{
|
||||
EntryId = GenerateSyntheticEntryId(clientId, index),
|
||||
Envelope = new DsseEnvelope
|
||||
{
|
||||
PayloadType = "application/vnd.stellaops.proof+json",
|
||||
Payload = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{{\"id\":\"{clientId}-{index}\"}}")),
|
||||
Signatures = new[]
|
||||
{
|
||||
new DsseSignature
|
||||
{
|
||||
KeyId = "test-key",
|
||||
Sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature"))
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string GenerateSyntheticProofJson(int index)
|
||||
{
|
||||
return $@"{{
|
||||
""entryId"": ""sha256:{GenerateHash($"entry-{index}")}:pkg:npm/example@1.0.{index}"",
|
||||
""spineId"": ""sha256:{GenerateHash($"spine-{index}")}"",
|
||||
""evidenceIds"": [""{GenerateHash($"ev1-{index}")}"", ""{GenerateHash($"ev2-{index}")}""],
|
||||
""reasoningId"": ""sha256:{GenerateHash($"reason-{index}")}"",
|
||||
""vexVerdictId"": ""sha256:{GenerateHash($"vex-{index}")}"",
|
||||
""policyVersion"": ""v2.3.1"",
|
||||
""createdAt"": ""{DateTimeOffset.UtcNow:O}""
|
||||
}}";
|
||||
}
|
||||
|
||||
private static string GenerateHash(string input)
|
||||
{
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
|
||||
return Convert.ToHexString(bytes).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static double CalculatePercentile(ConcurrentBag<long> values, int percentile)
|
||||
{
|
||||
if (!values.Any()) return 0;
|
||||
var sorted = values.OrderBy(x => x).ToList();
|
||||
return CalculatePercentileFromSorted(sorted.Select(x => (double)x).ToList(), percentile);
|
||||
}
|
||||
|
||||
private static double CalculatePercentileFromSorted<T>(List<T> sorted, int percentile) where T : IConvertible
|
||||
{
|
||||
if (sorted.Count == 0) return 0;
|
||||
var index = (int)Math.Ceiling(percentile / 100.0 * sorted.Count) - 1;
|
||||
index = Math.Max(0, Math.Min(index, sorted.Count - 1));
|
||||
return sorted[index].ToDouble(null);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
#region Supporting Types for Load Tests
|
||||
|
||||
/// <summary>
|
||||
/// Interface for proof spine building (mock target for load tests).
|
||||
/// </summary>
|
||||
public interface IProofSpineBuilder
|
||||
{
|
||||
Task<ProofSpine> BuildAsync(
|
||||
string entryId,
|
||||
string[] evidenceIds,
|
||||
string reasoningId,
|
||||
string vexVerdictId,
|
||||
string policyVersion,
|
||||
CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a proof spine created for an SBOM entry.
|
||||
/// </summary>
|
||||
public class ProofSpine
|
||||
{
|
||||
public required string EntryId { get; init; }
|
||||
public required string SpineId { get; init; }
|
||||
public required string PolicyVersion { get; init; }
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for DSSE envelope verification.
|
||||
/// </summary>
|
||||
public interface IDsseVerifier
|
||||
{
|
||||
Task<DsseVerificationResult> VerifyAsync(DsseEnvelope envelope, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE verification result.
|
||||
/// </summary>
|
||||
public class DsseVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for ID recomputation verification.
|
||||
/// </summary>
|
||||
public interface IIdRecomputer
|
||||
{
|
||||
Task<IdVerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// ID verification result.
|
||||
/// </summary>
|
||||
public class IdVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public string? ExpectedId { get; init; }
|
||||
public string? ActualId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for Rekor inclusion proof verification.
|
||||
/// </summary>
|
||||
public interface IRekorVerifier
|
||||
{
|
||||
Task<RekorVerificationResult> VerifyInclusionAsync(RekorEntry entry, CancellationToken cancellationToken);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Rekor verification result.
|
||||
/// </summary>
|
||||
public class RekorVerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public long? LogIndex { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a Rekor transparency log entry.
|
||||
/// </summary>
|
||||
public class RekorEntry
|
||||
{
|
||||
public long LogIndex { get; init; }
|
||||
public string? LogId { get; init; }
|
||||
public string? Body { get; init; }
|
||||
public DateTimeOffset IntegratedTime { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope for proof bundles.
|
||||
/// </summary>
|
||||
public class DsseEnvelope
|
||||
{
|
||||
public required string PayloadType { get; init; }
|
||||
public required string Payload { get; init; }
|
||||
public required DsseSignature[] Signatures { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE signature within an envelope.
|
||||
/// </summary>
|
||||
public class DsseSignature
|
||||
{
|
||||
public required string KeyId { get; init; }
|
||||
public required string Sig { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete proof bundle for verification.
|
||||
/// </summary>
|
||||
public class ProofBundle
|
||||
{
|
||||
public required string EntryId { get; init; }
|
||||
public required DsseEnvelope Envelope { get; init; }
|
||||
public RekorEntry? RekorEntry { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete verification result from the pipeline.
|
||||
/// </summary>
|
||||
public class VerificationResult
|
||||
{
|
||||
public bool IsValid { get; init; }
|
||||
public DsseVerificationResult? DsseResult { get; init; }
|
||||
public IdVerificationResult? IdResult { get; init; }
|
||||
public RekorVerificationResult? RekorResult { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verification pipeline that runs all verification steps.
|
||||
/// </summary>
|
||||
public class VerificationPipeline
|
||||
{
|
||||
private readonly IDsseVerifier _dsseVerifier;
|
||||
private readonly IIdRecomputer _idRecomputer;
|
||||
private readonly IRekorVerifier _rekorVerifier;
|
||||
private readonly ILogger<VerificationPipeline> _logger;
|
||||
|
||||
public VerificationPipeline(
|
||||
IDsseVerifier dsseVerifier,
|
||||
IIdRecomputer idRecomputer,
|
||||
IRekorVerifier rekorVerifier,
|
||||
ILogger<VerificationPipeline> logger)
|
||||
{
|
||||
_dsseVerifier = dsseVerifier;
|
||||
_idRecomputer = idRecomputer;
|
||||
_rekorVerifier = rekorVerifier;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task<VerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken)
|
||||
{
|
||||
// Step 1: DSSE signature verification
|
||||
var dsseResult = await _dsseVerifier.VerifyAsync(bundle.Envelope, cancellationToken);
|
||||
if (!dsseResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
Error = $"DSSE verification failed: {dsseResult.Error}"
|
||||
};
|
||||
}
|
||||
|
||||
// Step 2: ID recomputation
|
||||
var idResult = await _idRecomputer.VerifyAsync(bundle, cancellationToken);
|
||||
if (!idResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
Error = $"ID mismatch: expected {idResult.ExpectedId}, got {idResult.ActualId}"
|
||||
};
|
||||
}
|
||||
|
||||
// Step 3: Rekor inclusion (if entry present)
|
||||
RekorVerificationResult? rekorResult = null;
|
||||
if (bundle.RekorEntry != null)
|
||||
{
|
||||
rekorResult = await _rekorVerifier.VerifyInclusionAsync(bundle.RekorEntry, cancellationToken);
|
||||
if (!rekorResult.IsValid)
|
||||
{
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
RekorResult = rekorResult,
|
||||
Error = $"Rekor verification failed: {rekorResult.Error}"
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return new VerificationResult
|
||||
{
|
||||
IsValid = true,
|
||||
DsseResult = dsseResult,
|
||||
IdResult = idResult,
|
||||
RekorResult = rekorResult
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
@@ -18,7 +18,7 @@ public class ContentAddressedIdGeneratorTests
|
||||
|
||||
public ContentAddressedIdGeneratorTests()
|
||||
{
|
||||
var canonicalizer = new JsonCanonicalizer();
|
||||
var canonicalizer = new Rfc8785JsonCanonicalizer();
|
||||
var merkleBuilder = new DeterministicMerkleTreeBuilder();
|
||||
_generator = new ContentAddressedIdGenerator(canonicalizer, merkleBuilder);
|
||||
}
|
||||
@@ -117,8 +117,8 @@ public class ContentAddressedIdGeneratorTests
|
||||
[Fact]
|
||||
public void ComputeVexVerdictId_DifferentStatus_ProducesDifferentId()
|
||||
{
|
||||
var predicate1 = CreateTestVexPredicate() with { Status = VexStatus.Affected };
|
||||
var predicate2 = CreateTestVexPredicate() with { Status = VexStatus.NotAffected };
|
||||
var predicate1 = CreateTestVexPredicate() with { Status = "affected" };
|
||||
var predicate2 = CreateTestVexPredicate() with { Status = "not_affected" };
|
||||
|
||||
var id1 = _generator.ComputeVexVerdictId(predicate1);
|
||||
var id2 = _generator.ComputeVexVerdictId(predicate2);
|
||||
@@ -152,8 +152,8 @@ public class ContentAddressedIdGeneratorTests
|
||||
var vexVerdictId = CreateTestVexVerdictId();
|
||||
|
||||
// Different order, should produce same result
|
||||
var unsorted = new[] { CreateTestEvidenceId("z"), CreateTestEvidenceId("a") };
|
||||
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("z") };
|
||||
var unsorted = new[] { CreateTestEvidenceId("f"), CreateTestEvidenceId("a") };
|
||||
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("f") };
|
||||
|
||||
var id1 = _generator.ComputeProofBundleId(sbomEntryId, unsorted, reasoningId, vexVerdictId);
|
||||
var id2 = _generator.ComputeProofBundleId(sbomEntryId, sorted, reasoningId, vexVerdictId);
|
||||
@@ -272,9 +272,9 @@ public class ContentAddressedIdGeneratorTests
|
||||
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
|
||||
EvidenceIds = ["sha256:evidence1", "sha256:evidence2"],
|
||||
PolicyVersion = "v2024.12.16",
|
||||
Inputs = new ReasoningInputs
|
||||
Inputs = new Dictionary<string, object>
|
||||
{
|
||||
CurrentEvaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
|
||||
["currentEvaluationTime"] = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -282,12 +282,14 @@ public class ContentAddressedIdGeneratorTests
|
||||
{
|
||||
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Status = VexStatus.NotAffected,
|
||||
Justification = "Vulnerable code is not in execution path"
|
||||
Status = "not_affected",
|
||||
Justification = "vulnerable_code_not_present",
|
||||
PolicyVersion = "v2024.12.16",
|
||||
ReasoningId = "sha256:reasoning1"
|
||||
};
|
||||
|
||||
private static SbomEntryId CreateTestSbomEntryId() =>
|
||||
new("sha256:sbom123", "pkg:npm/lodash", "4.17.21");
|
||||
new($"sha256:{new string('0', 64)}", "pkg:npm/lodash", "4.17.21");
|
||||
|
||||
private static EvidenceId CreateTestEvidenceId(string suffix) =>
|
||||
new($"a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6{suffix.PadLeft(4, '0')}"[..64]);
|
||||
|
||||
@@ -43,16 +43,22 @@ public class ContentAddressedIdTests
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("")]
|
||||
[InlineData(" ")]
|
||||
[InlineData("invalid")]
|
||||
[InlineData(":digest")]
|
||||
[InlineData("algo:")]
|
||||
public void Parse_InvalidFormat_Throws(string input)
|
||||
public void Parse_InvalidFormat_ThrowsFormatException(string input)
|
||||
{
|
||||
Assert.Throws<FormatException>(() => ContentAddressedId.Parse(input));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("")]
|
||||
[InlineData(" ")]
|
||||
public void Parse_EmptyOrWhitespace_ThrowsArgumentException(string input)
|
||||
{
|
||||
Assert.Throws<ArgumentException>(() => ContentAddressedId.Parse(input));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Parse_InvalidDigestLength_Throws()
|
||||
{
|
||||
@@ -68,26 +74,6 @@ public class ContentAddressedIdTests
|
||||
|
||||
Assert.Equal(input, id.ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TrySplit_ValidInput_ReturnsTrue()
|
||||
{
|
||||
var valid = ContentAddressedId.TrySplit(
|
||||
"sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2",
|
||||
out var algorithm,
|
||||
out var digest);
|
||||
|
||||
Assert.True(valid);
|
||||
Assert.Equal("sha256", algorithm);
|
||||
Assert.NotEmpty(digest);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void TrySplit_InvalidInput_ReturnsFalse()
|
||||
{
|
||||
var valid = ContentAddressedId.TrySplit("invalid", out _, out _);
|
||||
Assert.False(valid);
|
||||
}
|
||||
}
|
||||
|
||||
public class EvidenceIdTests
|
||||
@@ -153,12 +139,14 @@ public class ProofBundleIdTests
|
||||
|
||||
public class SbomEntryIdTests
|
||||
{
|
||||
private static readonly string SbomDigest = $"sha256:{new string('a', 64)}";
|
||||
|
||||
[Fact]
|
||||
public void Constructor_WithVersion_CreatesId()
|
||||
{
|
||||
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
|
||||
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
|
||||
|
||||
Assert.Equal("sha256:abc123", id.SbomDigest);
|
||||
Assert.Equal(SbomDigest, id.SbomDigest);
|
||||
Assert.Equal("pkg:npm/lodash", id.Purl);
|
||||
Assert.Equal("4.17.21", id.Version);
|
||||
}
|
||||
@@ -166,9 +154,9 @@ public class SbomEntryIdTests
|
||||
[Fact]
|
||||
public void Constructor_WithoutVersion_CreatesId()
|
||||
{
|
||||
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
|
||||
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
|
||||
|
||||
Assert.Equal("sha256:abc123", id.SbomDigest);
|
||||
Assert.Equal(SbomDigest, id.SbomDigest);
|
||||
Assert.Equal("pkg:npm/lodash", id.Purl);
|
||||
Assert.Null(id.Version);
|
||||
}
|
||||
@@ -176,15 +164,15 @@ public class SbomEntryIdTests
|
||||
[Fact]
|
||||
public void ToString_WithVersion_IncludesVersion()
|
||||
{
|
||||
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
|
||||
Assert.Equal("sha256:abc123:pkg:npm/lodash@4.17.21", id.ToString());
|
||||
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
|
||||
Assert.Equal($"{SbomDigest}:pkg:npm/lodash@4.17.21", id.ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ToString_WithoutVersion_OmitsVersion()
|
||||
{
|
||||
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
|
||||
Assert.Equal("sha256:abc123:pkg:npm/lodash", id.ToString());
|
||||
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
|
||||
Assert.Equal($"{SbomDigest}:pkg:npm/lodash", id.ToString());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -6,18 +6,14 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
public class JsonCanonicalizerTests
|
||||
public sealed class JsonCanonicalizerTests
|
||||
{
|
||||
private readonly IJsonCanonicalizer _canonicalizer;
|
||||
|
||||
public JsonCanonicalizerTests()
|
||||
{
|
||||
_canonicalizer = new JsonCanonicalizer();
|
||||
}
|
||||
private readonly IJsonCanonicalizer _canonicalizer = new Rfc8785JsonCanonicalizer();
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_SortsKeys()
|
||||
@@ -29,9 +25,8 @@ public class JsonCanonicalizerTests
|
||||
Assert.Contains("\"a\":", outputStr);
|
||||
Assert.Contains("\"z\":", outputStr);
|
||||
|
||||
// Verify 'a' comes before 'z'
|
||||
var aIndex = outputStr.IndexOf("\"a\":");
|
||||
var zIndex = outputStr.IndexOf("\"z\":");
|
||||
var aIndex = outputStr.IndexOf("\"a\":", StringComparison.Ordinal);
|
||||
var zIndex = outputStr.IndexOf("\"z\":", StringComparison.Ordinal);
|
||||
Assert.True(aIndex < zIndex, "Keys should be sorted alphabetically");
|
||||
}
|
||||
|
||||
@@ -43,17 +38,18 @@ public class JsonCanonicalizerTests
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.DoesNotContain(" ", outputStr);
|
||||
Assert.Equal("{\"key\":\"value\"}", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_PreservesUtf8()
|
||||
public void Canonicalize_PreservesUnicodeContent()
|
||||
{
|
||||
var input = """{"text": "hello 世界 🌍"}"""u8;
|
||||
var text = "hello 世界 \U0001F30D";
|
||||
var input = JsonSerializer.SerializeToUtf8Bytes(new { text });
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Contains("世界", outputStr);
|
||||
Assert.Contains("🌍", outputStr);
|
||||
using var document = JsonDocument.Parse(output);
|
||||
Assert.Equal(text, document.RootElement.GetProperty("text").GetString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
@@ -67,20 +63,6 @@ public class JsonCanonicalizerTests
|
||||
Assert.Equal(output1, output2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_NestedObjects_SortsAllLevels()
|
||||
{
|
||||
var input = """{"outer": {"z": 1, "a": 2}, "inner": {"y": 3, "b": 4}}"""u8;
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
|
||||
// Check that nested keys are also sorted
|
||||
var nestedA = outputStr.IndexOf("\"a\":");
|
||||
var nestedZ = outputStr.IndexOf("\"z\":");
|
||||
Assert.True(nestedA < nestedZ, "Nested keys should be sorted");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_Arrays_PreservesOrder()
|
||||
{
|
||||
@@ -91,16 +73,6 @@ public class JsonCanonicalizerTests
|
||||
Assert.Contains("[3,1,2]", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_NullValue_Preserved()
|
||||
{
|
||||
var input = """{"key": null}"""u8;
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Contains("null", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_BooleanValues_LowerCase()
|
||||
{
|
||||
@@ -114,18 +86,6 @@ public class JsonCanonicalizerTests
|
||||
Assert.DoesNotContain("False", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_Numbers_MinimalRepresentation()
|
||||
{
|
||||
var input = """{"integer": 42, "float": 3.14, "zero": 0}"""u8;
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Contains("42", outputStr);
|
||||
Assert.Contains("3.14", outputStr);
|
||||
Assert.Contains("0", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_EmptyObject_ReturnsEmptyBraces()
|
||||
{
|
||||
@@ -135,90 +95,5 @@ public class JsonCanonicalizerTests
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Equal("{}", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_EmptyArray_ReturnsEmptyBrackets()
|
||||
{
|
||||
var input = """{"arr": []}"""u8;
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Contains("[]", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_StringEscaping_Preserved()
|
||||
{
|
||||
var input = """{"text": "line1\nline2\ttab"}"""u8;
|
||||
var output = _canonicalizer.Canonicalize(input);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Contains("\\n", outputStr);
|
||||
Assert.Contains("\\t", outputStr);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("""{"a":1}""")]
|
||||
[InlineData("""{"a":1,"b":2}""")]
|
||||
[InlineData("""{"nested":{"key":"value"}}""")]
|
||||
[InlineData("""{"array":[1,2,3]}""")]
|
||||
public void Canonicalize_AlreadyCanonical_Unchanged(string input)
|
||||
{
|
||||
var inputBytes = Encoding.UTF8.GetBytes(input);
|
||||
var output = _canonicalizer.Canonicalize(inputBytes);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output);
|
||||
Assert.Equal(input, outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Canonicalize_ComplexNesting_Deterministic()
|
||||
{
|
||||
var input = """
|
||||
{
|
||||
"level1": {
|
||||
"z": {
|
||||
"y": 1,
|
||||
"x": 2
|
||||
},
|
||||
"a": {
|
||||
"b": 3,
|
||||
"a": 4
|
||||
}
|
||||
},
|
||||
"array": [
|
||||
{"z": 1, "a": 2},
|
||||
{"y": 3, "b": 4}
|
||||
]
|
||||
}
|
||||
"""u8;
|
||||
|
||||
var output1 = _canonicalizer.Canonicalize(input);
|
||||
var output2 = _canonicalizer.Canonicalize(input);
|
||||
|
||||
Assert.Equal(output1, output2);
|
||||
|
||||
var outputStr = Encoding.UTF8.GetString(output1);
|
||||
Assert.DoesNotContain("\n", outputStr);
|
||||
Assert.DoesNotContain(" ", outputStr);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CanonicalizeDifferentWhitespace_ProducesSameOutput()
|
||||
{
|
||||
var input1 = """{"key":"value"}"""u8;
|
||||
var input2 = """{ "key" : "value" }"""u8;
|
||||
var input3 = """
|
||||
{
|
||||
"key": "value"
|
||||
}
|
||||
"""u8;
|
||||
|
||||
var output1 = _canonicalizer.Canonicalize(input1);
|
||||
var output2 = _canonicalizer.Canonicalize(input2);
|
||||
var output3 = _canonicalizer.Canonicalize(input3);
|
||||
|
||||
Assert.Equal(output1, output2);
|
||||
Assert.Equal(output2, output3);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -104,14 +104,11 @@ public class MerkleTreeBuilderTests
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeMerkleRoot_EmptyLeaves_ReturnsEmptyOrZeroHash()
|
||||
public void ComputeMerkleRoot_EmptyLeaves_Throws()
|
||||
{
|
||||
var leaves = Array.Empty<ReadOnlyMemory<byte>>();
|
||||
|
||||
// Should handle gracefully (either empty or zero hash)
|
||||
var root = _builder.ComputeMerkleRoot(leaves);
|
||||
|
||||
Assert.NotNull(root);
|
||||
Assert.Throws<ArgumentException>(() => _builder.ComputeMerkleRoot(leaves));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -243,7 +243,7 @@ public class ProofSpineAssemblyIntegrationTests
|
||||
leaves.Add(Encoding.UTF8.GetBytes(vexVerdictId));
|
||||
|
||||
// Build merkle tree
|
||||
return _builder.ComputeMerkleRoot(leaves.ToArray());
|
||||
return _builder.ComputeMerkleRoot(leaves);
|
||||
}
|
||||
|
||||
private static string FormatAsId(byte[] hash)
|
||||
@@ -251,65 +251,3 @@ public class ProofSpineAssemblyIntegrationTests
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for merkle tree building.
|
||||
/// </summary>
|
||||
public interface IMerkleTreeBuilder
|
||||
{
|
||||
byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic merkle tree builder using SHA-256.
|
||||
/// </summary>
|
||||
public class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
|
||||
{
|
||||
public byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves)
|
||||
{
|
||||
if (leaves.Length == 0)
|
||||
{
|
||||
return new byte[32]; // Zero hash for empty tree
|
||||
}
|
||||
|
||||
// Hash all leaves
|
||||
var currentLevel = new List<byte[]>();
|
||||
using var sha256 = System.Security.Cryptography.SHA256.Create();
|
||||
|
||||
foreach (var leaf in leaves)
|
||||
{
|
||||
currentLevel.Add(sha256.ComputeHash(leaf.ToArray()));
|
||||
}
|
||||
|
||||
// Pad to power of 2 by duplicating last leaf
|
||||
while (!IsPowerOfTwo(currentLevel.Count))
|
||||
{
|
||||
currentLevel.Add(currentLevel[^1]);
|
||||
}
|
||||
|
||||
// Build tree bottom-up
|
||||
while (currentLevel.Count > 1)
|
||||
{
|
||||
var nextLevel = new List<byte[]>();
|
||||
|
||||
for (int i = 0; i < currentLevel.Count; i += 2)
|
||||
{
|
||||
var left = currentLevel[i];
|
||||
var right = currentLevel[i + 1];
|
||||
|
||||
// Concatenate and hash
|
||||
var combined = new byte[left.Length + right.Length];
|
||||
Buffer.BlockCopy(left, 0, combined, 0, left.Length);
|
||||
Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
|
||||
|
||||
nextLevel.Add(sha256.ComputeHash(combined));
|
||||
}
|
||||
|
||||
currentLevel = nextLevel;
|
||||
}
|
||||
|
||||
return currentLevel[0];
|
||||
}
|
||||
|
||||
private static bool IsPowerOfTwo(int n) => n > 0 && (n & (n - 1)) == 0;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,122 @@
|
||||
using FluentAssertions;
|
||||
using Org.BouncyCastle.Crypto.Parameters;
|
||||
using StellaOps.Attestor.Envelope;
|
||||
using StellaOps.Attestor.ProofChain.Builders;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
using StellaOps.Attestor.ProofChain.Signing;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Signing;
|
||||
|
||||
public sealed class ProofChainSignerTests
|
||||
{
|
||||
private static readonly DateTimeOffset FixedTime = new(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public async Task SignThenVerify_EvidenceStatement_Passes()
|
||||
{
|
||||
var (signer, keyId) = CreateSigner();
|
||||
|
||||
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('0', 64)}");
|
||||
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
|
||||
|
||||
envelope.PayloadType.Should().Be(ProofChainSigner.InTotoPayloadType);
|
||||
envelope.Signatures.Should().ContainSingle();
|
||||
envelope.Signatures[0].KeyId.Should().Be(keyId);
|
||||
envelope.Signatures[0].Sig.Should().NotBeNullOrWhiteSpace();
|
||||
envelope.Payload.Should().NotBeNullOrWhiteSpace();
|
||||
|
||||
var result = await signer.VerifyEnvelopeAsync(envelope, new[] { keyId });
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.KeyId.Should().Be(keyId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Verify_TamperedPayload_Fails()
|
||||
{
|
||||
var (signer, keyId) = CreateSigner();
|
||||
|
||||
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('1', 64)}");
|
||||
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
|
||||
|
||||
var payloadBytes = Convert.FromBase64String(envelope.Payload);
|
||||
payloadBytes[^1] ^= 0xff;
|
||||
|
||||
var tampered = envelope with { Payload = Convert.ToBase64String(payloadBytes) };
|
||||
var result = await signer.VerifyEnvelopeAsync(tampered, new[] { keyId });
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CrossPlatformVector_Ed25519Signature_IsStable()
|
||||
{
|
||||
var (signer, keyId) = CreateSigner(keyIdOverride: "test-key");
|
||||
|
||||
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('2', 64)}");
|
||||
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
|
||||
|
||||
envelope.Signatures[0].KeyId.Should().Be(keyId);
|
||||
|
||||
// Filled in after the first successful run to lock the vector across platforms/implementations.
|
||||
const string expectedSig = "zJtzdRX76ENKf4IePv5AyTxqdS2YlVMcseaw2UBh1eBhfarUNq2AdiKyxVMWPftSy2uJJGfo7R7BilQO+Xj8AA==";
|
||||
envelope.Signatures[0].Sig.Should().Be(expectedSig);
|
||||
}
|
||||
|
||||
private static EvidenceStatement CreateEvidenceStatement(string evidenceId)
|
||||
{
|
||||
var builder = new StatementBuilder();
|
||||
var subject = new ProofSubject
|
||||
{
|
||||
Name = "image:demo",
|
||||
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
|
||||
};
|
||||
|
||||
var predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "0.50.0",
|
||||
CollectionTime = FixedTime,
|
||||
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
|
||||
VulnerabilityId = "CVE-2025-1234",
|
||||
RawFinding = new { severity = "high" },
|
||||
EvidenceId = evidenceId
|
||||
};
|
||||
|
||||
return builder.BuildEvidenceStatement(subject, predicate);
|
||||
}
|
||||
|
||||
private static (IProofChainSigner Signer, string KeyId) CreateSigner(string? keyIdOverride = null)
|
||||
{
|
||||
var seed = Enumerable.Range(0, 32).Select(static i => (byte)i).ToArray();
|
||||
var privateKey = new Ed25519PrivateKeyParameters(seed, 0);
|
||||
var publicKey = privateKey.GeneratePublicKey().GetEncoded();
|
||||
|
||||
var key = EnvelopeKey.CreateEd25519Signer(seed, publicKey, keyId: keyIdOverride ?? "proofchain-test-key");
|
||||
|
||||
var keyStore = new StaticKeyStore(new Dictionary<SigningKeyProfile, EnvelopeKey>
|
||||
{
|
||||
[SigningKeyProfile.Evidence] = key
|
||||
});
|
||||
|
||||
return (new ProofChainSigner(keyStore, new Rfc8785JsonCanonicalizer()), key.KeyId);
|
||||
}
|
||||
|
||||
private sealed class StaticKeyStore : IProofChainKeyStore
|
||||
{
|
||||
private readonly IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> _signingKeys;
|
||||
private readonly IReadOnlyDictionary<string, EnvelopeKey> _verificationKeys;
|
||||
|
||||
public StaticKeyStore(IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> signingKeys)
|
||||
{
|
||||
_signingKeys = signingKeys;
|
||||
_verificationKeys = signingKeys.Values.ToDictionary(static key => key.KeyId, static key => key, StringComparer.Ordinal);
|
||||
}
|
||||
|
||||
public bool TryGetSigningKey(SigningKeyProfile profile, out EnvelopeKey key)
|
||||
=> _signingKeys.TryGetValue(profile, out key!);
|
||||
|
||||
public bool TryGetVerificationKey(string keyId, out EnvelopeKey key)
|
||||
=> _verificationKeys.TryGetValue(keyId, out key!);
|
||||
}
|
||||
}
|
||||
@@ -8,191 +8,130 @@ using StellaOps.Attestor.ProofChain.Statements;
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for all DSSE statement types (Task PROOF-PRED-0012).
|
||||
/// Unit tests for proof chain statement construction (Task PROOF-PRED-0012).
|
||||
/// </summary>
|
||||
public class StatementBuilderTests
|
||||
public sealed class StatementBuilderTests
|
||||
{
|
||||
private readonly StatementBuilder _builder = new();
|
||||
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void BuildEvidenceStatement_SetsPredicateType()
|
||||
public void BuildEvidenceStatement_SetsPredicateTypeAndSubject()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123");
|
||||
var subject = CreateSubject("image:demo", "abc123");
|
||||
var predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "0.50.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
|
||||
VulnerabilityId = "CVE-2025-1234",
|
||||
RawFinding = new { severity = "high" },
|
||||
EvidenceId = $"sha256:{new string('0', 64)}"
|
||||
};
|
||||
|
||||
var statement = _builder.BuildEvidenceStatement(subject, predicate);
|
||||
|
||||
Assert.Equal("evidence.stella/v1", statement.PredicateType);
|
||||
Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildEvidenceStatement_PopulatesPredicate()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123",
|
||||
vulnerabilityId: "CVE-2025-1234");
|
||||
|
||||
Assert.Equal("evidence.stella/v1", statement.PredicateType);
|
||||
Assert.Single(statement.Subject);
|
||||
Assert.Equal(subject.Name, statement.Subject[0].Name);
|
||||
Assert.Equal("abc123", statement.Subject[0].Digest["sha256"]);
|
||||
Assert.Equal("trivy", statement.Predicate.Source);
|
||||
Assert.Equal("0.50.0", statement.Predicate.SourceVersion);
|
||||
Assert.Equal(_fixedTime, statement.Predicate.CollectionTime);
|
||||
Assert.Equal("sbom-123", statement.Predicate.SbomEntryId);
|
||||
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildProofSpineStatement_SetsPredicateType()
|
||||
public void BuildSbomLinkageStatement_SetsAllSubjects()
|
||||
{
|
||||
var statement = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
spineAlgorithm: "sha256-merkle",
|
||||
rootHash: "root-hash",
|
||||
leafHashes: ["leaf1", "leaf2", "leaf3"]);
|
||||
var subjects = new[]
|
||||
{
|
||||
CreateSubject("image:demo", "abc123"),
|
||||
CreateSubject("pkg:npm/lodash@4.17.21", "def456"),
|
||||
};
|
||||
|
||||
Assert.Equal("proofspine.stella/v1", statement.PredicateType);
|
||||
var predicate = new SbomLinkagePayload
|
||||
{
|
||||
Sbom = new SbomDescriptor
|
||||
{
|
||||
Id = "sbom-1",
|
||||
Format = "cyclonedx",
|
||||
SpecVersion = "1.6",
|
||||
MediaType = "application/vnd.cyclonedx+json",
|
||||
Sha256 = new string('1', 64),
|
||||
Location = "file:///sboms/demo.json"
|
||||
},
|
||||
Generator = new GeneratorDescriptor
|
||||
{
|
||||
Name = "stellaops-sbomgen",
|
||||
Version = "0.1.0"
|
||||
},
|
||||
GeneratedAt = _fixedTime,
|
||||
Tags = new Dictionary<string, string> { ["env"] = "test" }
|
||||
};
|
||||
|
||||
var statement = _builder.BuildSbomLinkageStatement(subjects, predicate);
|
||||
|
||||
Assert.Equal("https://stella-ops.org/predicates/sbom-linkage/v1", statement.PredicateType);
|
||||
Assert.Equal(2, statement.Subject.Count);
|
||||
Assert.Equal(subjects[0].Name, statement.Subject[0].Name);
|
||||
Assert.Equal(subjects[1].Name, statement.Subject[1].Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildProofSpineStatement_ContainsLeafHashes()
|
||||
public void BuildSbomLinkageStatement_EmptySubjects_Throws()
|
||||
{
|
||||
var leafHashes = new[] { "hash1", "hash2", "hash3", "hash4" };
|
||||
var statement = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
spineAlgorithm: "sha256-merkle",
|
||||
rootHash: "merkle-root",
|
||||
leafHashes: leafHashes);
|
||||
var predicate = new SbomLinkagePayload
|
||||
{
|
||||
Sbom = new SbomDescriptor
|
||||
{
|
||||
Id = "sbom-1",
|
||||
Format = "cyclonedx",
|
||||
SpecVersion = "1.6",
|
||||
MediaType = "application/vnd.cyclonedx+json",
|
||||
Sha256 = new string('1', 64)
|
||||
},
|
||||
Generator = new GeneratorDescriptor
|
||||
{
|
||||
Name = "stellaops-sbomgen",
|
||||
Version = "0.1.0"
|
||||
},
|
||||
GeneratedAt = _fixedTime
|
||||
};
|
||||
|
||||
Assert.Equal("sha256-merkle", statement.Predicate.Algorithm);
|
||||
Assert.Equal("merkle-root", statement.Predicate.RootHash);
|
||||
Assert.Equal(4, statement.Predicate.LeafHashes.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVexVerdictStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
|
||||
vulnerabilityId: "CVE-2025-1234",
|
||||
vexStatus: "not_affected",
|
||||
justification: "vulnerable_code_not_present",
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
Assert.Equal("vexverdict.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVexVerdictStatement_PopulatesVexDetails()
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
|
||||
vulnerabilityId: "CVE-2025-1234",
|
||||
vexStatus: "not_affected",
|
||||
justification: "vulnerable_code_not_present",
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
|
||||
Assert.Equal("not_affected", statement.Predicate.Status);
|
||||
Assert.Equal("vulnerable_code_not_present", statement.Predicate.Justification);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildReasoningStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildReasoningStatement(
|
||||
subject: new InTotoSubject { Name = "finding:123", Digest = new() { ["sha256"] = "abc123" } },
|
||||
reasoningType: "exploitability",
|
||||
conclusion: "not_exploitable",
|
||||
evidenceRefs: ["evidence1", "evidence2"]);
|
||||
|
||||
Assert.Equal("reasoning.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVerdictReceiptStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildVerdictReceiptStatement(
|
||||
subject: new InTotoSubject { Name = "scan:456", Digest = new() { ["sha256"] = "abc123" } },
|
||||
verdictHash: "verdict-hash",
|
||||
verdictTime: _fixedTime,
|
||||
signatureAlgorithm: "ECDSA-P256");
|
||||
|
||||
Assert.Equal("verdictreceipt.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildSbomLinkageStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildSbomLinkageStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
sbomDigest: "sbom-digest",
|
||||
sbomFormat: "cyclonedx",
|
||||
sbomVersion: "1.6");
|
||||
|
||||
Assert.Equal("sbomlinkage.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AllStatements_SerializeToValidJson()
|
||||
{
|
||||
var subject = new InTotoSubject { Name = "test", Digest = new() { ["sha256"] = "abc" } };
|
||||
|
||||
var evidence = _builder.BuildEvidenceStatement(subject, "trivy", "1.0", _fixedTime, "sbom1");
|
||||
var spine = _builder.BuildProofSpineStatement(subject, "sha256", "root", ["leaf1"]);
|
||||
var vex = _builder.BuildVexVerdictStatement(subject, "CVE-1", "fixed", null, _fixedTime);
|
||||
var reasoning = _builder.BuildReasoningStatement(subject, "exploitability", "safe", []);
|
||||
var receipt = _builder.BuildVerdictReceiptStatement(subject, "hash", _fixedTime, "ECDSA");
|
||||
var sbom = _builder.BuildSbomLinkageStatement(subject, "sbom-hash", "spdx", "3.0");
|
||||
|
||||
// All should serialize without throwing
|
||||
Assert.NotNull(JsonSerializer.Serialize(evidence));
|
||||
Assert.NotNull(JsonSerializer.Serialize(spine));
|
||||
Assert.NotNull(JsonSerializer.Serialize(vex));
|
||||
Assert.NotNull(JsonSerializer.Serialize(reasoning));
|
||||
Assert.NotNull(JsonSerializer.Serialize(receipt));
|
||||
Assert.NotNull(JsonSerializer.Serialize(sbom));
|
||||
Assert.Throws<ArgumentException>(() => _builder.BuildSbomLinkageStatement(Array.Empty<ProofSubject>(), predicate));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvidenceStatement_RoundTripsViaJson()
|
||||
{
|
||||
var original = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name: "artifact", Digest = new() { ["sha256"] = "hash123" } },
|
||||
source: "grype",
|
||||
sourceVersion: "0.80.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "entry-456",
|
||||
vulnerabilityId: "CVE-2025-9999");
|
||||
var subject = CreateSubject("image:demo", "abc123");
|
||||
var statement = _builder.BuildEvidenceStatement(subject, new EvidencePayload
|
||||
{
|
||||
Source = "grype",
|
||||
SourceVersion = "0.80.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
|
||||
VulnerabilityId = "CVE-2025-9999",
|
||||
RawFinding = "raw",
|
||||
EvidenceId = $"sha256:{new string('2', 64)}"
|
||||
});
|
||||
|
||||
var json = JsonSerializer.Serialize(original);
|
||||
var json = JsonSerializer.Serialize(statement);
|
||||
var restored = JsonSerializer.Deserialize<EvidenceStatement>(json);
|
||||
|
||||
Assert.NotNull(restored);
|
||||
Assert.Equal(original.PredicateType, restored.PredicateType);
|
||||
Assert.Equal(original.Predicate.Source, restored.Predicate.Source);
|
||||
Assert.Equal(original.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
|
||||
Assert.Equal(statement.PredicateType, restored.PredicateType);
|
||||
Assert.Equal(statement.Subject[0].Name, restored.Subject[0].Name);
|
||||
Assert.Equal(statement.Predicate.EvidenceId, restored.Predicate.EvidenceId);
|
||||
Assert.Equal(statement.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofSpineStatement_RoundTripsViaJson()
|
||||
{
|
||||
var original = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:latest", Digest = new() { ["sha256"] = "img-hash" } },
|
||||
spineAlgorithm: "sha256-merkle-v2",
|
||||
rootHash: "merkle-root-abc",
|
||||
leafHashes: ["a", "b", "c", "d"]);
|
||||
|
||||
var json = JsonSerializer.Serialize(original);
|
||||
var restored = JsonSerializer.Deserialize<ProofSpineStatement>(json);
|
||||
|
||||
Assert.NotNull(restored);
|
||||
Assert.Equal(original.Predicate.RootHash, restored.Predicate.RootHash);
|
||||
Assert.Equal(original.Predicate.LeafHashes.Length, restored.Predicate.LeafHashes.Length);
|
||||
}
|
||||
private static ProofSubject CreateSubject(string name, string sha256Digest)
|
||||
=> new()
|
||||
{
|
||||
Name = name,
|
||||
Digest = new Dictionary<string, string> { ["sha256"] = sha256Digest }
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,172 +0,0 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Copyright (c) StellaOps Contributors
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Builders;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
using StellaOps.Attestor.ProofChain.Validation;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for statement validation (Task PROOF-PRED-0015).
|
||||
/// </summary>
|
||||
public class StatementValidatorTests
|
||||
{
|
||||
private readonly StatementBuilder _builder = new();
|
||||
private readonly IStatementValidator _validator = new StatementValidator();
|
||||
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void Validate_ValidEvidenceStatement_ReturnsSuccess()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123");
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_EvidenceStatementWithEmptySource_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc" } }],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Source"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_StatementWithEmptySubject_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Subject"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_ProofSpineWithEmptyLeafHashes_ReturnsError()
|
||||
{
|
||||
var statement = new ProofSpineStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "image", Digest = new() { ["sha256"] = "hash" } }],
|
||||
Predicate = new ProofSpinePayload
|
||||
{
|
||||
Algorithm = "sha256-merkle",
|
||||
RootHash = "root",
|
||||
LeafHashes = []
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("LeafHashes"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_VexVerdictWithValidStatus_ReturnsSuccess()
|
||||
{
|
||||
var validStatuses = new[] { "not_affected", "affected", "fixed", "under_investigation" };
|
||||
|
||||
foreach (var status in validStatuses)
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } },
|
||||
vulnerabilityId: "CVE-2025-1",
|
||||
vexStatus: status,
|
||||
justification: null,
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid, $"Status '{status}' should be valid");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_VexVerdictWithInvalidStatus_ReturnsError()
|
||||
{
|
||||
var statement = new VexVerdictStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } }],
|
||||
Predicate = new VexVerdictPayload
|
||||
{
|
||||
VulnerabilityId = "CVE-2025-1",
|
||||
Status = "invalid_status",
|
||||
AnalysisTime = _fixedTime
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Status"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_ReasoningStatementWithEvidence_ReturnsSuccess()
|
||||
{
|
||||
var statement = _builder.BuildReasoningStatement(
|
||||
subject: new InTotoSubject { Name = "finding", Digest = new() { ["sha256"] = "abc" } },
|
||||
reasoningType: "exploitability",
|
||||
conclusion: "not_exploitable",
|
||||
evidenceRefs: ["evidence-1", "evidence-2"]);
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_SubjectWithMissingDigest_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "artifact", Digest = new() }],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Digest"));
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,7 @@
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" Version="6.0.4" />
|
||||
<PackageReference Include="FluentAssertions" Version="6.12.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.24407.12" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
|
||||
<PackageReference Include="NSubstitute" Version="5.1.0" />
|
||||
<PackageReference Include="xunit" Version="2.9.3" />
|
||||
@@ -26,7 +26,7 @@
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include=\"..\\..\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj\" />
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
|
||||
@@ -1,465 +0,0 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerificationPipelineIntegrationTests.cs
|
||||
// Sprint: SPRINT_0501_0001_0001_proof_evidence_chain_master
|
||||
// Task: PROOF-MASTER-0002
|
||||
// Description: Integration tests for the full proof chain verification pipeline
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using NSubstitute;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the verification pipeline.
|
||||
/// Tests PROOF-MASTER-0002: Full proof chain verification flow.
|
||||
/// </summary>
|
||||
public class VerificationPipelineIntegrationTests
|
||||
{
|
||||
private readonly IProofBundleStore _proofStore;
|
||||
private readonly IDsseVerifier _dsseVerifier;
|
||||
private readonly IRekorVerifier _rekorVerifier;
|
||||
private readonly ITrustAnchorResolver _trustAnchorResolver;
|
||||
private readonly ILogger<VerificationPipeline> _logger;
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
|
||||
public VerificationPipelineIntegrationTests()
|
||||
{
|
||||
_proofStore = Substitute.For<IProofBundleStore>();
|
||||
_dsseVerifier = Substitute.For<IDsseVerifier>();
|
||||
_rekorVerifier = Substitute.For<IRekorVerifier>();
|
||||
_trustAnchorResolver = Substitute.For<ITrustAnchorResolver>();
|
||||
_logger = NullLogger<VerificationPipeline>.Instance;
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 17, 12, 0, 0, TimeSpan.Zero));
|
||||
}
|
||||
|
||||
#region Full Pipeline Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_ValidProofBundle_AllStepsPass()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:valid123");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true,
|
||||
VerifierVersion = "1.0.0-test"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Pass);
|
||||
result.Steps.Should().HaveCount(4);
|
||||
result.Steps.Should().OnlyContain(s => s.Passed);
|
||||
result.FirstFailure.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_InvalidDsseSignature_FailsAtFirstStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:invalid-sig");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupInvalidDsseVerification(keyId, "Signature mismatch");
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Fail);
|
||||
result.FirstFailure.Should().NotBeNull();
|
||||
result.FirstFailure!.StepName.Should().Be("dsse_signature");
|
||||
result.Receipt.FailureReason.Should().Contain("Signature mismatch");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputation()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:wrong-id");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithWrongId(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "id_recomputation" && !s.Passed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_NoRekorEntry_FailsAtRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:no-rekor");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithoutRekor(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "rekor_inclusion" && !s.Passed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:skip-rekor");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupBundleWithoutRekor(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false // Skip Rekor
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
|
||||
rekorStep.Should().NotBeNull();
|
||||
rekorStep!.Passed.Should().BeTrue();
|
||||
rekorStep.Details.Should().Contain("skipped");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchor()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:bad-key");
|
||||
var keyId = "unauthorized-key";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupTrustAnchorWithoutKey(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.StepName == "trust_anchor" && !s.Passed);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Receipt Generation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_GeneratesReceipt_WithCorrectFields()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:receipt-test");
|
||||
var keyId = "key-1";
|
||||
|
||||
SetupValidBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(keyId);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifierVersion = "2.0.0"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Receipt.Should().NotBeNull();
|
||||
result.Receipt.ReceiptId.Should().StartWith("receipt:");
|
||||
result.Receipt.VerifierVersion.Should().Be("2.0.0");
|
||||
result.Receipt.ProofBundleId.Should().Be(bundleId.Value);
|
||||
result.Receipt.StepsSummary.Should().HaveCount(4);
|
||||
result.Receipt.TotalDurationMs.Should().BeGreaterOrEqualTo(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_FailingPipeline_ReceiptContainsFailureReason()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:fail-receipt");
|
||||
|
||||
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
|
||||
.Returns((ProofBundle?)null);
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Receipt.Result.Should().Be(VerificationResult.Fail);
|
||||
result.Receipt.FailureReason.Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cancellation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_Cancelled_ReturnsFailure()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = new ProofBundleId("sha256:cancel-test");
|
||||
var cts = new CancellationTokenSource();
|
||||
cts.Cancel();
|
||||
|
||||
var pipeline = CreatePipeline();
|
||||
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
|
||||
|
||||
// Act
|
||||
var result = await pipeline.VerifyAsync(request, cts.Token);
|
||||
|
||||
// Assert
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Steps.Should().Contain(s => s.ErrorMessage?.Contains("cancelled") == true);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private VerificationPipeline CreatePipeline()
|
||||
{
|
||||
return VerificationPipeline.CreateDefault(
|
||||
_proofStore,
|
||||
_dsseVerifier,
|
||||
_rekorVerifier,
|
||||
_trustAnchorResolver,
|
||||
_logger,
|
||||
_timeProvider);
|
||||
}
|
||||
|
||||
private void SetupValidBundle(ProofBundleId bundleId, string keyId)
|
||||
{
|
||||
var bundle = CreateTestBundle(keyId, includeRekor: true);
|
||||
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
|
||||
.Returns(bundle);
|
||||
}
|
||||
|
||||
private void SetupBundleWithWrongId(ProofBundleId bundleId, string keyId)
|
||||
{
|
||||
// Create a bundle but the ID won't match when recomputed
|
||||
var bundle = new ProofBundle
|
||||
{
|
||||
Statements = new List<ProofStatement>
|
||||
{
|
||||
new ProofStatement
|
||||
{
|
||||
StatementId = "sha256:wrong-statement-id", // Won't match content
|
||||
PredicateType = "evidence.stella/v1",
|
||||
Predicate = new { test = "data" }
|
||||
}
|
||||
},
|
||||
Envelopes = new List<DsseEnvelope>
|
||||
{
|
||||
new DsseEnvelope
|
||||
{
|
||||
PayloadType = "application/vnd.in-toto+json",
|
||||
Payload = "test"u8.ToArray(),
|
||||
Signatures = new List<DsseSignature>
|
||||
{
|
||||
new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
|
||||
}
|
||||
}
|
||||
},
|
||||
RekorLogEntry = CreateTestRekorEntry()
|
||||
};
|
||||
|
||||
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
|
||||
.Returns(bundle);
|
||||
}
|
||||
|
||||
private void SetupBundleWithoutRekor(ProofBundleId bundleId, string keyId)
|
||||
{
|
||||
var bundle = CreateTestBundle(keyId, includeRekor: false);
|
||||
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
|
||||
.Returns(bundle);
|
||||
}
|
||||
|
||||
private void SetupValidDsseVerification(string keyId)
|
||||
{
|
||||
_dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
|
||||
.Returns(new DsseVerificationResult { IsValid = true, KeyId = keyId });
|
||||
}
|
||||
|
||||
private void SetupInvalidDsseVerification(string keyId, string error)
|
||||
{
|
||||
_dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
|
||||
.Returns(new DsseVerificationResult
|
||||
{
|
||||
IsValid = false,
|
||||
KeyId = keyId,
|
||||
ErrorMessage = error
|
||||
});
|
||||
}
|
||||
|
||||
private void SetupValidRekorVerification()
|
||||
{
|
||||
_rekorVerifier.VerifyInclusionAsync(
|
||||
Arg.Any<string>(),
|
||||
Arg.Any<long>(),
|
||||
Arg.Any<InclusionProof>(),
|
||||
Arg.Any<SignedTreeHead>(),
|
||||
Arg.Any<CancellationToken>())
|
||||
.Returns(new RekorVerificationResult { IsValid = true });
|
||||
}
|
||||
|
||||
private void SetupValidTrustAnchor(string keyId)
|
||||
{
|
||||
var anchor = new TrustAnchorInfo
|
||||
{
|
||||
AnchorId = Guid.NewGuid(),
|
||||
AllowedKeyIds = new List<string> { keyId },
|
||||
RevokedKeyIds = new List<string>()
|
||||
};
|
||||
|
||||
_trustAnchorResolver.GetAnchorAsync(Arg.Any<Guid>(), Arg.Any<CancellationToken>())
|
||||
.Returns(anchor);
|
||||
_trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
|
||||
.Returns(anchor);
|
||||
}
|
||||
|
||||
private void SetupTrustAnchorWithoutKey(string keyId)
|
||||
{
|
||||
var anchor = new TrustAnchorInfo
|
||||
{
|
||||
AnchorId = Guid.NewGuid(),
|
||||
AllowedKeyIds = new List<string> { "different-key" },
|
||||
RevokedKeyIds = new List<string>()
|
||||
};
|
||||
|
||||
_trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
|
||||
.Returns(anchor);
|
||||
}
|
||||
|
||||
private static ProofBundle CreateTestBundle(string keyId, bool includeRekor)
|
||||
{
|
||||
return new ProofBundle
|
||||
{
|
||||
Statements = new List<ProofStatement>
|
||||
{
|
||||
new ProofStatement
|
||||
{
|
||||
StatementId = "sha256:test-statement",
|
||||
PredicateType = "evidence.stella/v1",
|
||||
Predicate = new { test = "data" }
|
||||
}
|
||||
},
|
||||
Envelopes = new List<DsseEnvelope>
|
||||
{
|
||||
new DsseEnvelope
|
||||
{
|
||||
PayloadType = "application/vnd.in-toto+json",
|
||||
Payload = "test"u8.ToArray(),
|
||||
Signatures = new List<DsseSignature>
|
||||
{
|
||||
new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
|
||||
}
|
||||
}
|
||||
},
|
||||
RekorLogEntry = includeRekor ? CreateTestRekorEntry() : null
|
||||
};
|
||||
}
|
||||
|
||||
private static RekorLogEntry CreateTestRekorEntry()
|
||||
{
|
||||
return new RekorLogEntry
|
||||
{
|
||||
LogId = "test-log",
|
||||
LogIndex = 12345,
|
||||
InclusionProof = new InclusionProof
|
||||
{
|
||||
Hashes = new List<byte[]> { new byte[] { 0x01 } },
|
||||
TreeSize = 1000,
|
||||
RootHash = new byte[] { 0x02 }
|
||||
},
|
||||
SignedTreeHead = new SignedTreeHead
|
||||
{
|
||||
TreeSize = 1000,
|
||||
RootHash = new byte[] { 0x02 },
|
||||
Signature = new byte[] { 0x03 }
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Fake time provider for testing.
|
||||
/// </summary>
|
||||
internal sealed class FakeTimeProvider : TimeProvider
|
||||
{
|
||||
private DateTimeOffset _now;
|
||||
|
||||
public FakeTimeProvider(DateTimeOffset initialTime)
|
||||
{
|
||||
_now = initialTime;
|
||||
}
|
||||
|
||||
public override DateTimeOffset GetUtcNow() => _now;
|
||||
|
||||
public void Advance(TimeSpan duration) => _now = _now.Add(duration);
|
||||
|
||||
public void SetTime(DateTimeOffset time) => _now = time;
|
||||
}
|
||||
@@ -1,484 +0,0 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerificationPipelineTests.cs
|
||||
// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
|
||||
// Task: PROOF-API-0011 - Integration tests for verification pipeline
|
||||
// Description: Tests for the full verification pipeline including DSSE, ID
|
||||
// recomputation, Rekor inclusion, and trust anchor verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Receipts;
|
||||
using StellaOps.Attestor.ProofChain.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the verification pipeline.
|
||||
/// </summary>
|
||||
public class VerificationPipelineTests
|
||||
{
|
||||
private readonly Mock<IProofBundleStore> _proofStoreMock;
|
||||
private readonly Mock<IDsseVerifier> _dsseVerifierMock;
|
||||
private readonly Mock<IRekorVerifier> _rekorVerifierMock;
|
||||
private readonly Mock<ITrustAnchorResolver> _trustAnchorResolverMock;
|
||||
private readonly VerificationPipeline _pipeline;
|
||||
|
||||
public VerificationPipelineTests()
|
||||
{
|
||||
_proofStoreMock = new Mock<IProofBundleStore>();
|
||||
_dsseVerifierMock = new Mock<IDsseVerifier>();
|
||||
_rekorVerifierMock = new Mock<IRekorVerifier>();
|
||||
_trustAnchorResolverMock = new Mock<ITrustAnchorResolver>();
|
||||
|
||||
_pipeline = VerificationPipeline.CreateDefault(
|
||||
_proofStoreMock.Object,
|
||||
_dsseVerifierMock.Object,
|
||||
_rekorVerifierMock.Object,
|
||||
_trustAnchorResolverMock.Object,
|
||||
NullLogger<VerificationPipeline>.Instance);
|
||||
}
|
||||
|
||||
#region Full Pipeline Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_AllStepsPass_ReturnsValidResult()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "test-key-id";
|
||||
var anchorId = Guid.NewGuid();
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(anchorId, keyId);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(VerificationResult.Pass, result.Receipt.Result);
|
||||
Assert.All(result.Steps, step => Assert.True(step.Passed));
|
||||
Assert.Null(result.FirstFailure);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_DsseSignatureInvalid_FailsAtDsseStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "invalid-key";
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId);
|
||||
SetupInvalidDsseVerification("Signature verification failed");
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
|
||||
Assert.NotNull(result.FirstFailure);
|
||||
Assert.Equal("dsse_signature", result.FirstFailure.StepName);
|
||||
Assert.Contains("Signature verification failed", result.FirstFailure.ErrorMessage);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputationStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "test-key-id";
|
||||
|
||||
// Setup a bundle with mismatched ID
|
||||
SetupProofBundleWithMismatchedId(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
var idStep = result.Steps.FirstOrDefault(s => s.StepName == "id_recomputation");
|
||||
Assert.NotNull(idStep);
|
||||
// Note: The actual result depends on how the bundle is constructed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_RekorInclusionFails_FailsAtRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "test-key-id";
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupInvalidRekorVerification("Inclusion proof invalid");
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
|
||||
Assert.NotNull(rekorStep);
|
||||
Assert.False(rekorStep.Passed);
|
||||
Assert.Contains("Inclusion proof invalid", rekorStep.ErrorMessage);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "test-key-id";
|
||||
var anchorId = Guid.NewGuid();
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId, includeRekorEntry: false);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidTrustAnchor(anchorId, keyId);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
|
||||
Assert.NotNull(rekorStep);
|
||||
Assert.True(rekorStep.Passed);
|
||||
Assert.Contains("skipped", rekorStep.Details, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchorStep()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "unauthorized-key";
|
||||
var anchorId = Guid.NewGuid();
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupTrustAnchorWithoutKey(anchorId, keyId);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
var anchorStep = result.Steps.FirstOrDefault(s => s.StepName == "trust_anchor");
|
||||
Assert.NotNull(anchorStep);
|
||||
Assert.False(anchorStep.Passed);
|
||||
Assert.Contains("not authorized", anchorStep.ErrorMessage);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Receipt Generation Tests
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_GeneratesReceiptWithCorrectFields()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
var keyId = "test-key-id";
|
||||
var anchorId = Guid.NewGuid();
|
||||
var verifierVersion = "2.0.0";
|
||||
|
||||
SetupValidProofBundle(bundleId, keyId);
|
||||
SetupValidDsseVerification(keyId);
|
||||
SetupValidRekorVerification();
|
||||
SetupValidTrustAnchor(anchorId, keyId);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = true,
|
||||
VerifierVersion = verifierVersion
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result.Receipt);
|
||||
Assert.NotEmpty(result.Receipt.ReceiptId);
|
||||
Assert.Equal(bundleId.Value, result.Receipt.ProofBundleId);
|
||||
Assert.Equal(verifierVersion, result.Receipt.VerifierVersion);
|
||||
Assert.True(result.Receipt.TotalDurationMs >= 0);
|
||||
Assert.NotEmpty(result.Receipt.StepsSummary!);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VerifyAsync_FailedVerification_ReceiptContainsFailureReason()
|
||||
{
|
||||
// Arrange
|
||||
var bundleId = CreateTestBundleId();
|
||||
|
||||
_proofStoreMock
|
||||
.Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync((ProofBundle?)null);
|
||||
|
||||
var request = new VerificationPipelineRequest
|
||||
{
|
||||
ProofBundleId = bundleId,
|
||||
VerifyRekor = false
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _pipeline.VerifyAsync(request);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
|
||||
Assert.NotNull(result.Receipt.FailureReason);
|
||||
Assert.Contains("not found", result.Receipt.FailureReason);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cancellation Tests
|
||||
|
||||
[Fact]
public async Task VerifyAsync_Cancelled_ReturnsPartialResults()
{
    // Arrange
    var bundleId = CreateTestBundleId();
    var keyId = "test-key-id";
    var cts = new CancellationTokenSource();

    SetupValidProofBundle(bundleId, keyId);

    // Setup DSSE verification to trigger cancellation mid-pipeline and then
    // honor the token, so later steps observe a cancelled token.
    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .Returns(async (DsseEnvelope _, CancellationToken ct) =>
        {
            await cts.CancelAsync();
            ct.ThrowIfCancellationRequested();
            return new DsseVerificationResult { IsValid = true, KeyId = keyId };
        });

    var request = new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    };

    // Act - the pipeline is expected to absorb the cancellation rather than throw.
    var result = await _pipeline.VerifyAsync(request, cts.Token);

    // Assert - the original test awaited the pipeline but asserted nothing, so it
    // could never fail. At minimum the pipeline must still surface a result object
    // (partial results) instead of null when a step is cancelled.
    Assert.NotNull(result);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static ProofBundleId CreateTestBundleId()
{
    // Produce a unique, well-formed "sha256:<lowercase hex>" identifier by
    // hashing a fresh GUID's string form.
    var seed = Encoding.UTF8.GetBytes(Guid.NewGuid().ToString());
    var digest = SHA256.HashData(seed);
    var hex = Convert.ToHexString(digest).ToLowerInvariant();
    return new ProofBundleId($"sha256:{hex}");
}
|
||||
|
||||
private void SetupValidProofBundle(ProofBundleId bundleId, string keyId, bool includeRekorEntry = true)
{
    // Assemble a minimal but complete bundle: one statement, one signed envelope,
    // and (optionally) a Rekor log entry with inclusion proof + signed tree head.
    var statement = new ProofStatement
    {
        StatementId = "sha256:statement123",
        PredicateType = "https://stella-ops.io/v1/evidence",
        Predicate = new { test = "data" }
    };

    var envelope = new DsseEnvelope
    {
        PayloadType = "application/vnd.in-toto+json",
        Payload = Encoding.UTF8.GetBytes("{}"),
        Signatures = new List<DsseSignature>
        {
            new DsseSignature { KeyId = keyId, Sig = new byte[64] }
        }
    };

    RekorLogEntry? rekorEntry = null;
    if (includeRekorEntry)
    {
        rekorEntry = new RekorLogEntry
        {
            LogId = "test-log",
            LogIndex = 12345,
            InclusionProof = new InclusionProof
            {
                Hashes = new List<byte[]>(),
                TreeSize = 100,
                RootHash = new byte[32]
            },
            SignedTreeHead = new SignedTreeHead
            {
                TreeSize = 100,
                RootHash = new byte[32],
                Signature = new byte[64]
            }
        };
    }

    var bundle = new ProofBundle
    {
        Statements = new List<ProofStatement> { statement },
        Envelopes = new List<DsseEnvelope> { envelope },
        RekorLogEntry = rekorEntry
    };

    _proofStoreMock
        .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(bundle);
}
|
||||
|
||||
private void SetupProofBundleWithMismatchedId(ProofBundleId bundleId, string keyId)
{
    // Return a bundle whose content will hash to a DIFFERENT id than the one it
    // is stored under, so content-address verification must flag the mismatch.
    var statement = new ProofStatement
    {
        StatementId = "sha256:differentstatement",
        PredicateType = "https://stella-ops.io/v1/evidence",
        Predicate = new { different = "data" }
    };

    var envelope = new DsseEnvelope
    {
        PayloadType = "application/vnd.in-toto+json",
        Payload = Encoding.UTF8.GetBytes("{\"different\":\"payload\"}"),
        Signatures = new List<DsseSignature>
        {
            new DsseSignature { KeyId = keyId, Sig = new byte[64] }
        }
    };

    var mismatchedBundle = new ProofBundle
    {
        Statements = new List<ProofStatement> { statement },
        Envelopes = new List<DsseEnvelope> { envelope }
    };

    _proofStoreMock
        .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(mismatchedBundle);
}
|
||||
|
||||
private void SetupValidDsseVerification(string keyId)
{
    // Any DSSE envelope verifies successfully under the given key id.
    var success = new DsseVerificationResult { IsValid = true, KeyId = keyId };

    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(success);
}
|
||||
|
||||
private void SetupInvalidDsseVerification(string errorMessage)
{
    // Any DSSE envelope fails verification with the supplied error message.
    var failure = new DsseVerificationResult
    {
        IsValid = false,
        KeyId = "unknown",
        ErrorMessage = errorMessage
    };

    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(failure);
}
|
||||
|
||||
private void SetupValidRekorVerification()
{
    // Any inclusion-proof check against the transparency log succeeds.
    _rekorVerifierMock
        .Setup(x => x.VerifyInclusionAsync(
            It.IsAny<string>(),
            It.IsAny<long>(),
            It.IsAny<InclusionProof>(),
            It.IsAny<SignedTreeHead>(),
            It.IsAny<CancellationToken>()))
        .ReturnsAsync(new RekorVerificationResult { IsValid = true });
}
|
||||
|
||||
private void SetupInvalidRekorVerification(string errorMessage)
{
    // Any inclusion-proof check fails with the supplied error message.
    var failure = new RekorVerificationResult { IsValid = false, ErrorMessage = errorMessage };

    _rekorVerifierMock
        .Setup(x => x.VerifyInclusionAsync(
            It.IsAny<string>(),
            It.IsAny<long>(),
            It.IsAny<InclusionProof>(),
            It.IsAny<SignedTreeHead>(),
            It.IsAny<CancellationToken>()))
        .ReturnsAsync(failure);
}
|
||||
|
||||
private void SetupValidTrustAnchor(Guid anchorId, string keyId)
{
    // Anchor allows the given key and revokes nothing; both resolver lookup
    // paths (by proof id and by anchor id) return the same anchor instance.
    var anchor = new TrustAnchorInfo
    {
        AnchorId = anchorId,
        AllowedKeyIds = new List<string> { keyId },
        RevokedKeyIds = new List<string>()
    };

    _trustAnchorResolverMock
        .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(anchor);

    _trustAnchorResolverMock
        .Setup(x => x.GetAnchorAsync(anchorId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(anchor);
}
|
||||
|
||||
private void SetupTrustAnchorWithoutKey(Guid anchorId, string keyId)
{
    // Anchor whose allowed-key list deliberately excludes <keyId>.
    // The original hard-coded "other-key-not-matching" and never used the keyId
    // parameter, which would silently break if a caller ever passed that exact
    // literal; deriving the allowed key from keyId guarantees the mismatch.
    var anchor = new TrustAnchorInfo
    {
        AnchorId = anchorId,
        AllowedKeyIds = new List<string> { keyId + "-not-matching" },
        RevokedKeyIds = new List<string>()
    };

    _trustAnchorResolverMock
        .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(anchor);
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
Reference in New Issue
Block a user