save progress

This commit is contained in:
StellaOps Bot
2026-01-02 21:06:27 +02:00
parent f46bde5575
commit 3f197814c5
441 changed files with 21545 additions and 4306 deletions

View File

@@ -6,9 +6,9 @@ using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.OpenSsl;
using Org.BouncyCastle.Asn1.X9;
using StellaOps.Cryptography;
using StellaOps.AirGap.Importer.Validation;
using AttestorDsseEnvelope = StellaOps.Attestor.Envelope.DsseEnvelope;
using AttestorDsseSignature = StellaOps.Attestor.Envelope.DsseSignature;
using AttestorDssePreAuthenticationEncoding = StellaOps.Attestor.Envelope.DssePreAuthenticationEncoding;
using StellaOps.Attestor.Envelope;
namespace StellaOps.AirGap.Importer.Reconciliation.Signing;
@@ -43,7 +43,7 @@ internal sealed class EvidenceGraphDsseSigner
var canonicalJson = serializer.Serialize(graph, pretty: false);
var payloadBytes = Encoding.UTF8.GetBytes(canonicalJson);
var pae = DssePreAuthenticationEncoding.Encode(EvidenceGraphPayloadType, payloadBytes);
var pae = AttestorDssePreAuthenticationEncoding.Compute(EvidenceGraphPayloadType, payloadBytes);
var envelopeKey = LoadEcdsaEnvelopeKey(signingPrivateKeyPemPath, signingKeyId);
var signature = SignDeterministicEcdsa(pae, signingPrivateKeyPemPath, envelopeKey.AlgorithmId);

View File

@@ -8,3 +8,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0026-M | DONE | Maintainability audit for StellaOps.AirGap.Importer. |
| AUDIT-0026-T | DONE | Test coverage audit for StellaOps.AirGap.Importer. |
| AUDIT-0026-A | DOING | Pending approval for changes. |
| VAL-SMOKE-001 | DONE | Resolved DSSE signer ambiguity; smoke build now proceeds. |

View File

@@ -53,19 +53,8 @@ public class DsseVerifierTests
private static byte[] BuildPae(string payloadType, string payload)
{
var parts = new[] { "DSSEv1", payloadType, payload };
var paeBuilder = new System.Text.StringBuilder();
paeBuilder.Append("PAE:");
paeBuilder.Append(parts.Length);
foreach (var part in parts)
{
paeBuilder.Append(' ');
paeBuilder.Append(part.Length);
paeBuilder.Append(' ');
paeBuilder.Append(part);
}
return System.Text.Encoding.UTF8.GetBytes(paeBuilder.ToString());
var payloadBytes = System.Text.Encoding.UTF8.GetBytes(payload);
return StellaOps.Attestor.Envelope.DssePreAuthenticationEncoding.Compute(payloadType, payloadBytes);
}
private static string Fingerprint(byte[] pub)

View File

@@ -94,6 +94,9 @@ public sealed class ImportValidatorTests
quarantine,
NullLogger<ImportValidator>.Instance);
var payloadEntries = new List<NamedStream> { new("a.txt", new MemoryStream("data"u8.ToArray())) };
var merkleRoot = new MerkleRootCalculator().ComputeRoot(payloadEntries);
var manifestJson = $"{{\"version\":\"1.0.0\",\"merkleRoot\":\"{merkleRoot}\"}}";
var tempRoot = Path.Combine(Path.GetTempPath(), "stellaops-airgap-tests", Guid.NewGuid().ToString("N"));
Directory.CreateDirectory(tempRoot);
var bundlePath = Path.Combine(tempRoot, "bundle.tar.zst");
@@ -106,7 +109,7 @@ public sealed class ImportValidatorTests
BundleType: "offline-kit",
BundleDigest: "sha256:bundle",
BundlePath: bundlePath,
ManifestJson: "{\"version\":\"1.0.0\"}",
ManifestJson: manifestJson,
ManifestVersion: "1.0.0",
ManifestCreatedAt: DateTimeOffset.Parse("2025-12-15T00:00:00Z"),
ForceActivate: false,
@@ -116,7 +119,7 @@ public sealed class ImportValidatorTests
RootJson: root,
SnapshotJson: snapshot,
TimestampJson: timestamp,
PayloadEntries: new List<NamedStream> { new("a.txt", new MemoryStream("data"u8.ToArray())) },
PayloadEntries: payloadEntries,
TrustStore: trustStore,
ApproverIds: new[] { "approver-1", "approver-2" });
@@ -146,19 +149,8 @@ public sealed class ImportValidatorTests
private static byte[] BuildPae(string payloadType, string payload)
{
var parts = new[] { "DSSEv1", payloadType, payload };
var paeBuilder = new System.Text.StringBuilder();
paeBuilder.Append("PAE:");
paeBuilder.Append(parts.Length);
foreach (var part in parts)
{
paeBuilder.Append(' ');
paeBuilder.Append(part.Length);
paeBuilder.Append(' ');
paeBuilder.Append(part);
}
return System.Text.Encoding.UTF8.GetBytes(paeBuilder.ToString());
var payloadBytes = System.Text.Encoding.UTF8.GetBytes(payload);
return StellaOps.Attestor.Envelope.DssePreAuthenticationEncoding.Compute(payloadType, payloadBytes);
}
private static string Fingerprint(byte[] pub) => Convert.ToHexString(SHA256.HashData(pub)).ToLowerInvariant();

View File

@@ -8,3 +8,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0027-M | DONE | Maintainability audit for StellaOps.AirGap.Importer.Tests. |
| AUDIT-0027-T | DONE | Test coverage audit for StellaOps.AirGap.Importer.Tests. |
| AUDIT-0027-A | TODO | Pending approval for changes. |
| VAL-SMOKE-001 | DONE | Align DSSE PAE test data and manifest merkle root; unit tests pass. |

View File

@@ -16,6 +16,11 @@ public sealed record DsseDetachedPayloadReference
throw new ArgumentException("Detached payload digest must be provided.", nameof(sha256));
}
if (!IsSha256Digest(sha256))
{
throw new ArgumentException("Detached payload digest must be a 64-character hex SHA256 value.", nameof(sha256));
}
Uri = uri;
Sha256 = sha256.ToLowerInvariant();
Length = length;
@@ -29,4 +34,27 @@ public sealed record DsseDetachedPayloadReference
public long? Length { get; }
public string? MediaType { get; }
// Validates that a digest string is exactly 64 hexadecimal characters
// (a lowercase-or-uppercase hex SHA-256 value).
private static bool IsSha256Digest(string value)
{
    if (value.Length != 64)
    {
        return false;
    }

    for (var i = 0; i < value.Length; i++)
    {
        if (!IsHex(value[i]))
        {
            return false;
        }
    }

    return true;
}

// True for ASCII hex digits in either case.
private static bool IsHex(char ch)
{
    return ch is (>= '0' and <= '9') or (>= 'a' and <= 'f') or (>= 'A' and <= 'F');
}
}

View File

@@ -1,8 +1,6 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
@@ -18,10 +16,25 @@ public static class DsseEnvelopeSerializer
options ??= new DsseEnvelopeSerializationOptions();
if (!options.EmitCompactJson && !options.EmitExpandedJson)
{
throw new InvalidOperationException("At least one JSON format must be emitted.");
}
if (options.CompressionAlgorithm != DsseCompressionAlgorithm.None)
{
throw new NotSupportedException("Payload compression is not supported during serialization. Compress the payload before envelope creation and ensure payloadType/metadata reflect the compressed bytes.");
}
var originalPayload = envelope.Payload.ToArray();
var processedPayload = ApplyCompression(originalPayload, options.CompressionAlgorithm);
var payloadSha256 = Convert.ToHexString(SHA256.HashData(originalPayload)).ToLowerInvariant();
var payloadBase64 = Convert.ToBase64String(processedPayload);
var payloadBase64 = Convert.ToBase64String(originalPayload);
if (envelope.DetachedPayload is not null
&& !string.Equals(payloadSha256, envelope.DetachedPayload.Sha256, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException("Detached payload digest does not match the envelope payload.");
}
byte[]? compactJson = null;
if (options.EmitCompactJson)
@@ -37,7 +50,7 @@ public static class DsseEnvelopeSerializer
payloadBase64,
payloadSha256,
originalPayload.Length,
processedPayload.Length,
originalPayload.Length,
options,
originalPayload);
}
@@ -47,7 +60,7 @@ public static class DsseEnvelopeSerializer
expandedJson,
payloadSha256,
originalPayload.Length,
processedPayload.Length,
originalPayload.Length, // No compression, so processed == original
options.CompressionAlgorithm,
envelope.DetachedPayload);
}
@@ -227,33 +240,6 @@ public static class DsseEnvelopeSerializer
}
}
private static byte[] ApplyCompression(byte[] payload, DsseCompressionAlgorithm algorithm)
{
return algorithm switch
{
DsseCompressionAlgorithm.None => payload,
DsseCompressionAlgorithm.Gzip => CompressWithStream(payload, static (stream) => new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)),
DsseCompressionAlgorithm.Brotli => CompressWithStream(payload, static (stream) => new BrotliStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)),
_ => throw new NotSupportedException($"Compression algorithm '{algorithm}' is not supported.")
};
}
private static byte[] CompressWithStream(byte[] payload, Func<Stream, Stream> streamFactory)
{
if (payload.Length == 0)
{
return Array.Empty<byte>();
}
using var output = new MemoryStream();
using (var compressionStream = streamFactory(output))
{
compressionStream.Write(payload);
}
return output.ToArray();
}
private static string GetCompressionName(DsseCompressionAlgorithm algorithm)
{
return algorithm switch

View File

@@ -0,0 +1,47 @@
using System;
using System.Buffers;
using System.Globalization;
using System.Text;
namespace StellaOps.Attestor.Envelope;
/// <summary>
/// Computes DSSE pre-authentication encoding (PAE) for payload signing.
/// </summary>
/// <summary>
/// Computes the DSSE pre-authentication encoding (PAE) for payload signing:
/// <c>"DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload</c>, where each
/// LEN is the ASCII decimal byte count and the payload type is UTF-8 encoded.
/// </summary>
public static class DssePreAuthenticationEncoding
{
    /// <summary>
    /// Builds the PAE byte string for <paramref name="payloadType"/> and
    /// <paramref name="payload"/>.
    /// </summary>
    /// <param name="payloadType">DSSE payload type URI; must not be null.</param>
    /// <param name="payload">Raw payload bytes that will be signed.</param>
    /// <returns>The PAE bytes to feed to the signature algorithm.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="payloadType"/> is null.</exception>
    public static byte[] Compute(string payloadType, ReadOnlySpan<byte> payload)
    {
        ArgumentNullException.ThrowIfNull(payloadType);

        var typeBytes = Encoding.UTF8.GetBytes(payloadType);

        // "DSSEv1 {typeLen} " and " {payloadLen} " are pure ASCII; lengths use
        // invariant-culture decimal digits so the encoding is locale-independent.
        var header = Encoding.ASCII.GetBytes(
            string.Create(CultureInfo.InvariantCulture, $"DSSEv1 {typeBytes.Length} "));
        var separator = Encoding.ASCII.GetBytes(
            string.Create(CultureInfo.InvariantCulture, $" {payload.Length} "));

        // Assemble into a single exact-size buffer instead of going through an
        // ArrayBufferWriter plus a final defensive copy.
        var result = new byte[header.Length + typeBytes.Length + separator.Length + payload.Length];
        var offset = 0;
        header.CopyTo(result.AsSpan(offset));
        offset += header.Length;
        typeBytes.CopyTo(result.AsSpan(offset));
        offset += typeBytes.Length;
        separator.CopyTo(result.AsSpan(offset));
        offset += separator.Length;
        payload.CopyTo(result.AsSpan(offset));
        return result;
    }
}

View File

@@ -11,6 +11,8 @@ public sealed record DsseSignature
throw new ArgumentException("Signature must be provided.", nameof(signature));
}
ValidateBase64(signature);
Signature = signature;
KeyId = keyId;
}
@@ -28,4 +30,19 @@ public sealed record DsseSignature
return new DsseSignature(Convert.ToBase64String(signature), keyId);
}
// Rejects signatures that are not valid base64 or that decode to zero bytes.
// Both failure modes surface as ArgumentException so callers see a single
// exception type for invalid input.
private static void ValidateBase64(string signature)
{
    byte[] decoded;
    try
    {
        decoded = Convert.FromBase64String(signature);
    }
    catch (FormatException ex)
    {
        throw new ArgumentException("Signature must be valid base64.", nameof(signature), ex);
    }

    if (decoded.Length == 0)
    {
        throw new ArgumentException("Signature must not decode to an empty byte array.", nameof(signature));
    }
}
}

View File

@@ -30,6 +30,19 @@ public sealed class EnvelopeSignatureService
};
}
/// <summary>
/// Signs the DSSE pre-authentication encoding of <paramref name="payload"/>
/// (not the raw payload bytes) using the supplied key.
/// </summary>
/// <exception cref="ArgumentException">When <paramref name="payloadType"/> is null or whitespace.</exception>
public EnvelopeResult<EnvelopeSignature> SignDsse(string payloadType, ReadOnlySpan<byte> payload, EnvelopeKey key, CancellationToken cancellationToken = default)
{
    if (string.IsNullOrWhiteSpace(payloadType))
    {
        throw new ArgumentException("payloadType must be provided.", nameof(payloadType));
    }

    cancellationToken.ThrowIfCancellationRequested();

    var preAuthEncoding = DssePreAuthenticationEncoding.Compute(payloadType, payload);
    return Sign(preAuthEncoding, key, cancellationToken);
}
public EnvelopeResult<bool> Verify(ReadOnlySpan<byte> payload, EnvelopeSignature signature, EnvelopeKey key, CancellationToken cancellationToken = default)
{
if (signature is null)
@@ -67,6 +80,19 @@ public sealed class EnvelopeSignatureService
};
}
/// <summary>
/// Verifies a signature against the DSSE pre-authentication encoding of
/// <paramref name="payload"/>; the counterpart to <c>SignDsse</c>.
/// </summary>
/// <exception cref="ArgumentException">When <paramref name="payloadType"/> is null or whitespace.</exception>
public EnvelopeResult<bool> VerifyDsse(string payloadType, ReadOnlySpan<byte> payload, EnvelopeSignature signature, EnvelopeKey key, CancellationToken cancellationToken = default)
{
    if (string.IsNullOrWhiteSpace(payloadType))
    {
        throw new ArgumentException("payloadType must be provided.", nameof(payloadType));
    }

    cancellationToken.ThrowIfCancellationRequested();

    var preAuthEncoding = DssePreAuthenticationEncoding.Compute(payloadType, payload);
    return Verify(preAuthEncoding, signature, key, cancellationToken);
}
private static EnvelopeResult<EnvelopeSignature> SignEd25519(ReadOnlySpan<byte> payload, EnvelopeKey key)
{
if (!key.HasPrivateMaterial)

View File

@@ -5,7 +5,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0051-M | DONE | Maintainability audit for StellaOps.Attestor.Envelope. |
| AUDIT-0051-T | DONE | Test coverage audit for StellaOps.Attestor.Envelope. |
| AUDIT-0051-A | TODO | Pending approval for changes. |
| AUDIT-0051-A | DONE | Applied audit remediation for envelope signing/serialization. |

View File

@@ -1,13 +1,9 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.Envelope;
using Xunit;
using StellaOps.TestKit;
namespace StellaOps.Attestor.Envelope.Tests;
@@ -50,8 +46,8 @@ public sealed class DsseEnvelopeSerializerTests
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void Serialize_WithCompressionEnabled_EmbedsCompressedPayloadMetadata()
[Fact]
public void Serialize_WithCompressionEnabled_Throws()
{
var payload = Encoding.UTF8.GetBytes("{\"foo\":\"bar\",\"count\":1}");
var envelope = new DsseEnvelope(
@@ -65,30 +61,7 @@ public sealed class DsseEnvelopeSerializerTests
CompressionAlgorithm = DsseCompressionAlgorithm.Gzip
};
var result = DsseEnvelopeSerializer.Serialize(envelope, options);
Assert.NotNull(result.CompactJson);
var compactDoc = JsonDocument.Parse(result.CompactJson!);
var payloadBase64 = compactDoc.RootElement.GetProperty("payload").GetString();
Assert.False(string.IsNullOrEmpty(payloadBase64));
var compressedBytes = Convert.FromBase64String(payloadBase64!);
using var compressedStream = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(compressedStream, CompressionMode.Decompress);
using var decompressed = new MemoryStream();
gzip.CopyTo(decompressed);
Assert.True(payload.SequenceEqual(decompressed.ToArray()));
using var expanded = JsonDocument.Parse(result.ExpandedJson!);
var info = expanded.RootElement.GetProperty("payloadInfo");
Assert.Equal(payload.Length, info.GetProperty("length").GetInt32());
var compression = info.GetProperty("compression");
Assert.Equal("gzip", compression.GetProperty("algorithm").GetString());
Assert.Equal(compressedBytes.Length, compression.GetProperty("compressedLength").GetInt32());
Assert.Equal(DsseCompressionAlgorithm.Gzip, result.Compression);
Assert.Equal(payload.Length, result.OriginalPayloadLength);
Assert.Equal(compressedBytes.Length, result.EmbeddedPayloadLength);
Assert.Throws<NotSupportedException>(() => DsseEnvelopeSerializer.Serialize(envelope, options));
}
[Trait("Category", TestCategories.Unit)]
@@ -96,9 +69,10 @@ public sealed class DsseEnvelopeSerializerTests
public void Serialize_WithDetachedReference_WritesMetadata()
{
var payload = Encoding.UTF8.GetBytes("detached payload preview");
var payloadSha256 = Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(payload)).ToLowerInvariant();
var reference = new DsseDetachedPayloadReference(
"https://evidence.example.com/sbom.json",
"abc123",
payloadSha256,
payload.Length,
"application/json");
@@ -123,7 +97,28 @@ public sealed class DsseEnvelopeSerializerTests
}
[Trait("Category", TestCategories.Unit)]
[Fact]
[Fact]
public void Serialize_WithDetachedReferenceMismatch_Throws()
{
    // The envelope carries real payload bytes, but the detached reference's
    // digest is 64 'a' characters, which cannot match the payload's SHA-256.
    var payload = Encoding.UTF8.GetBytes("detached payload preview");
    var reference = new DsseDetachedPayloadReference(
        "https://evidence.example.com/sbom.json",
        new string('a', 64),
        payload.Length,
        "application/json");
    var envelope = new DsseEnvelope(
        "application/vnd.in-toto+json",
        payload,
        new[] { new DsseSignature("AQID") },
        "text/plain",
        reference);

    // Serialize compares the payload hash against the detached reference and
    // throws InvalidOperationException on mismatch.
    Assert.Throws<InvalidOperationException>(() => DsseEnvelopeSerializer.Serialize(envelope));
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void Serialize_CompactOnly_SkipsExpandedPayload()
{
var payload = Encoding.UTF8.GetBytes("payload");
@@ -142,4 +137,23 @@ public sealed class DsseEnvelopeSerializerTests
Assert.NotNull(result.CompactJson);
Assert.Null(result.ExpandedJson);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void Serialize_WithNoFormats_Throws()
{
    // With both compact and expanded JSON emission disabled there is nothing
    // to produce, so Serialize must reject the options up front.
    var payload = Encoding.UTF8.GetBytes("payload");
    var envelope = new DsseEnvelope(
        "application/vnd.in-toto+json",
        payload,
        new[] { new DsseSignature("AQID") });
    var options = new DsseEnvelopeSerializationOptions
    {
        EmitCompactJson = false,
        EmitExpandedJson = false
    };

    Assert.Throws<InvalidOperationException>(() => DsseEnvelopeSerializer.Serialize(envelope, options));
}
}

View File

@@ -0,0 +1,58 @@
using System;
using System.Security.Cryptography;
using System.Text;
using StellaOps.Attestor.Envelope;
using StellaOps.Cryptography;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Attestor.Envelope.Tests;
public sealed class EnvelopeSignatureServiceTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void DssePreAuthenticationEncoding_UsesAsciiLengths()
    {
        // Expected shape: "DSSEv1 {typeByteLen} {type} {payloadByteLen} {payload}".
        const string payloadType = "application/vnd.in-toto+json";
        var payloadBytes = Encoding.UTF8.GetBytes("hello");

        var pae = DssePreAuthenticationEncoding.Compute(payloadType, payloadBytes);

        var expected = $"DSSEv1 {Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payloadBytes.Length} hello";
        Assert.Equal(expected, Encoding.UTF8.GetString(pae));
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void SignDsse_MatchesSignOnPreAuthenticationEncoding()
    {
        // SignDsse(type, payload) must be equivalent to Sign(PAE(type, payload)).
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var keyParameters = ecdsa.ExportParameters(includePrivateParameters: true);
        var envelopeKey = EnvelopeKey.CreateEcdsaSigner(SignatureAlgorithms.Es256, keyParameters, "test-key");
        var service = new EnvelopeSignatureService();
        const string payloadType = "application/vnd.in-toto+json";
        var payloadBytes = Encoding.UTF8.GetBytes("payload");
        var pae = DssePreAuthenticationEncoding.Compute(payloadType, payloadBytes);

        var signedDirectly = service.Sign(pae, envelopeKey);
        var signedViaDsse = service.SignDsse(payloadType, payloadBytes, envelopeKey);

        Assert.True(signedDirectly.IsSuccess);
        Assert.True(signedViaDsse.IsSuccess);
        Assert.Equal(signedDirectly.Value.KeyId, signedViaDsse.Value.KeyId);
        Assert.Equal(signedDirectly.Value.AlgorithmId, signedViaDsse.Value.AlgorithmId);

        // Both signatures verify through both the raw and DSSE entry points.
        var verifiedDirectly = service.Verify(pae, signedDirectly.Value, envelopeKey);
        var verifiedViaDsse = service.VerifyDsse(payloadType, payloadBytes, signedViaDsse.Value, envelopeKey);
        Assert.True(verifiedDirectly.IsSuccess);
        Assert.True(verifiedViaDsse.IsSuccess);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void DsseSignature_WithInvalidBase64_Throws()
    {
        // The DsseSignature constructor validates its base64 input.
        Assert.Throws<ArgumentException>(() => new DsseSignature("not base64"));
    }
}

View File

@@ -8,3 +8,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| AUDIT-0052-M | DONE | Maintainability audit for StellaOps.Attestor.Envelope.Tests. |
| AUDIT-0052-T | DONE | Test coverage audit for StellaOps.Attestor.Envelope.Tests. |
| AUDIT-0052-A | TODO | Pending approval for changes. |
| VAL-SMOKE-001 | DONE | Stabilized DSSE signature tests under xUnit v3. |

View File

@@ -3,7 +3,8 @@ using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
var generator = new Generator();
var options = GeneratorOptions.Parse(args);
var generator = new Generator(options);
generator.Run();
internal sealed class Generator
@@ -14,15 +15,17 @@ internal sealed class Generator
private readonly string _schemaDir;
private readonly string _tsDir;
private readonly string _goDir;
private readonly bool _pruneStaleSchemas;
public Generator()
public Generator(GeneratorOptions options)
{
_registry = TypeRegistry.Build();
_repoRoot = ResolveRepoRoot();
_repoRoot = ResolveRepoRoot(options.RepoRoot);
_moduleRoot = Path.Combine(_repoRoot, "src", "Attestor", "StellaOps.Attestor.Types");
_schemaDir = Path.Combine(_moduleRoot, "schemas");
_tsDir = Path.Combine(_moduleRoot, "generated", "ts");
_goDir = Path.Combine(_moduleRoot, "generated", "go");
_pruneStaleSchemas = options.PruneStaleSchemas;
}
public void Run()
@@ -31,11 +34,13 @@ internal sealed class Generator
Directory.CreateDirectory(_tsDir);
Directory.CreateDirectory(_goDir);
var expectedSchemas = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var root in _registry.RootObjects)
{
var schema = SchemaBuilder.Build(root);
var schemaPath = Path.Combine(_schemaDir, $"{root.SchemaFileStem}.schema.json");
WriteUtf8File(schemaPath, schema);
expectedSchemas.Add(schemaPath);
}
var tsCode = TypeScriptEmitter.Emit(_registry);
@@ -43,17 +48,72 @@ internal sealed class Generator
var goCode = GoEmitter.Emit(_registry);
WriteUtf8File(Path.Combine(_goDir, "types.go"), goCode);
if (_pruneStaleSchemas)
{
PruneStaleSchemas(expectedSchemas);
}
}
private static string ResolveRepoRoot()
private static string ResolveRepoRoot(string? overridePath)
{
var current = new DirectoryInfo(AppContext.BaseDirectory);
for (var i = 0; i < 8; i++)
if (!string.IsNullOrWhiteSpace(overridePath))
{
current = current?.Parent ?? throw new InvalidOperationException("Unable to locate repository root.");
var normalized = Path.GetFullPath(overridePath);
if (!Directory.Exists(normalized))
{
throw new DirectoryNotFoundException($"Repository root override not found: {normalized}");
}
return normalized;
}
return current!.FullName;
var fromCurrent = FindRepoRoot(Directory.GetCurrentDirectory());
if (fromCurrent is not null)
{
return fromCurrent;
}
var fromBase = FindRepoRoot(AppContext.BaseDirectory);
if (fromBase is not null)
{
return fromBase;
}
throw new InvalidOperationException("Unable to locate repository root.");
}
// Walks up from startPath until a directory containing ".git" is found and
// returns its full path, or null when no ancestor qualifies.
// ".git" may be a directory (normal clone) or a file (git worktree/submodule),
// so both are checked; checking only Directory.Exists would miss worktrees.
private static string? FindRepoRoot(string startPath)
{
    for (var current = new DirectoryInfo(startPath); current is not null; current = current.Parent)
    {
        var gitPath = Path.Combine(current.FullName, ".git");
        if (Directory.Exists(gitPath) || File.Exists(gitPath))
        {
            return current.FullName;
        }
    }

    return null;
}
// Deletes generated *.schema.json files in the schema directory that were not
// produced by the current run. Hand-authored schemas (those without the
// generator's "$comment" marker) are left untouched.
private void PruneStaleSchemas(HashSet<string> expectedSchemas)
{
    foreach (var file in Directory.EnumerateFiles(_schemaDir, "*.schema.json", SearchOption.TopDirectoryOnly))
    {
        var isStale = !expectedSchemas.Contains(file);
        if (isStale && IsGeneratedSchema(file))
        {
            File.Delete(file);
        }
    }
}
private static void WriteUtf8File(string path, string content)
@@ -61,6 +121,50 @@ internal sealed class Generator
var normalized = content.Replace("\r\n", "\n", StringComparison.Ordinal);
File.WriteAllText(path, normalized, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false));
}
// Returns true only when the file parses as a JSON object whose "$comment"
// property is exactly the marker written by SchemaBuilder. Malformed JSON or
// a non-object root is treated as "not generated" so pruning neither crashes
// on nor deletes a hand-authored file. (The original let JsonDocument.Parse
// throw on malformed input and TryGetProperty throw on a non-object root.)
private static bool IsGeneratedSchema(string path)
{
    try
    {
        using var stream = File.OpenRead(path);
        using var doc = JsonDocument.Parse(stream);
        return doc.RootElement.ValueKind == JsonValueKind.Object
            && doc.RootElement.TryGetProperty("$comment", out var comment)
            && comment.ValueKind == JsonValueKind.String
            && string.Equals(comment.GetString(), "Generated by StellaOps.Attestor.Types.Generator.", StringComparison.Ordinal);
    }
    catch (JsonException)
    {
        // Unparseable file: not one of ours; leave it alone.
        return false;
    }
}
}
// Command-line options for the generator:
//   --repo-root <path>  override repository root autodetection
//   --no-prune          keep stale generated schemas instead of deleting them
internal sealed record GeneratorOptions(string? RepoRoot, bool PruneStaleSchemas)
{
    // Parses args left to right; unrecognized arguments are ignored.
    public static GeneratorOptions Parse(string[] args)
    {
        string? repoRoot = null;
        var prune = true;

        var index = 0;
        while (index < args.Length)
        {
            var argument = args[index];
            if (string.Equals(argument, "--repo-root", StringComparison.OrdinalIgnoreCase))
            {
                if (index + 1 >= args.Length)
                {
                    throw new ArgumentException("Missing value for --repo-root.");
                }

                repoRoot = args[index + 1];
                index += 2;
                continue;
            }

            if (string.Equals(argument, "--no-prune", StringComparison.OrdinalIgnoreCase))
            {
                prune = false;
            }

            index++;
        }

        return new GeneratorOptions(repoRoot, prune);
    }
}
internal sealed class TypeRegistry
@@ -468,7 +572,8 @@ internal static class SchemaBuilder
var schema = new JsonObject
{
["$schema"] = "https://json-schema.org/draft/2020-12/schema",
["$id"] = $"https://stella-ops.org/schemas/attestor/{root.SchemaFileStem}.json",
["$id"] = $"https://stella-ops.org/schemas/attestor/{root.SchemaFileStem}.schema.json",
["$comment"] = "Generated by StellaOps.Attestor.Types.Generator.",
["title"] = root.Summary,
["type"] = "object",
["additionalProperties"] = false
@@ -678,6 +783,14 @@ internal static class TypeScriptEmitter
builder.AppendLine();
}
foreach (var obj in orderedObjects)
{
var orderedKeys = obj.Properties.Select(p => p.Name).OrderBy(n => n, StringComparer.Ordinal);
var keysLiteral = string.Join(", ", orderedKeys.Select(key => $"'{key.Replace("'", "\\'")}'"));
AppendLine(builder, 0, $"const {obj.Name}Keys = Object.freeze([{keysLiteral}] as const);");
builder.AppendLine();
}
AppendLine(builder, 0, "function isRecord(value: unknown): value is Record<string, unknown> {");
AppendLine(builder, 1, "return typeof value === 'object' && value !== null && !Array.isArray(value);");
AppendLine(builder, 0, "}");
@@ -688,12 +801,22 @@ internal static class TypeScriptEmitter
AppendLine(builder, 0, "}");
builder.AppendLine();
AppendLine(builder, 0, "function assertNoUnknownKeys(value: Record<string, unknown>, allowed: readonly string[], path: string[]): void {");
AppendLine(builder, 1, "for (const key of Object.keys(value)) {");
AppendLine(builder, 2, "if (!allowed.includes(key)) {");
AppendLine(builder, 3, "throw new Error(`${pathString(path)} has unknown property '${key}'.`);");
AppendLine(builder, 2, "}");
AppendLine(builder, 1, "}");
AppendLine(builder, 0, "}");
builder.AppendLine();
foreach (var obj in orderedObjects)
{
AppendLine(builder, 0, $"function assert{obj.Name}(value: unknown, path: string[]): asserts value is {obj.Name} {{");
AppendLine(builder, 1, "if (!isRecord(value)) {");
AppendLine(builder, 2, "throw new Error(`${pathString(path)} must be an object.`);");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, $"assertNoUnknownKeys(value, {obj.Name}Keys, path);");
foreach (var property in obj.Properties)
{
@@ -734,23 +857,55 @@ internal static class TypeScriptEmitter
}
AppendLine(builder, 0, "function canonicalStringify(input: unknown): string {");
AppendLine(builder, 1, "return JSON.stringify(sortValue(input));");
AppendLine(builder, 1, "return canonicalizeValue(input);");
AppendLine(builder, 0, "}");
builder.AppendLine();
AppendLine(builder, 0, "function sortValue(value: unknown): unknown {");
AppendLine(builder, 0, "function canonicalizeValue(value: unknown): string {");
AppendLine(builder, 1, "if (value === null) {");
AppendLine(builder, 2, "return 'null';");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (typeof value === 'string') {");
AppendLine(builder, 2, "return JSON.stringify(value);");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (typeof value === 'number') {");
AppendLine(builder, 2, "return formatNumber(value);");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (typeof value === 'boolean') {");
AppendLine(builder, 2, "return value ? 'true' : 'false';");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (Array.isArray(value)) {");
AppendLine(builder, 2, "return value.map(sortValue);");
AppendLine(builder, 2, "return `[${value.map(canonicalizeValue).join(',')}]`;");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (isRecord(value)) {");
AppendLine(builder, 2, "const ordered: Record<string, unknown> = {};");
AppendLine(builder, 2, "const keys = Object.keys(value).sort();");
AppendLine(builder, 2, "for (const key of keys) {");
AppendLine(builder, 3, "ordered[key] = sortValue(value[key]);");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "return ordered;");
AppendLine(builder, 2, "return canonicalizeObject(value);");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return value;");
AppendLine(builder, 1, "throw new Error('Unsupported value for canonical JSON.');");
AppendLine(builder, 0, "}");
builder.AppendLine();
AppendLine(builder, 0, "function canonicalizeObject(value: Record<string, unknown>): string {");
AppendLine(builder, 1, "const keys = Object.keys(value).sort();");
AppendLine(builder, 1, "const entries: string[] = [];");
AppendLine(builder, 1, "for (const key of keys) {");
AppendLine(builder, 2, "const entry = value[key];");
AppendLine(builder, 2, "if (entry === undefined) {");
AppendLine(builder, 3, "continue;");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "entries.push(`${JSON.stringify(key)}:${canonicalizeValue(entry)}`);");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return `{${entries.join(',')}}`;");
AppendLine(builder, 0, "}");
builder.AppendLine();
AppendLine(builder, 0, "function formatNumber(value: number): string {");
AppendLine(builder, 1, "if (!Number.isFinite(value)) {");
AppendLine(builder, 2, "throw new Error('Non-finite numbers are not allowed in canonical JSON.');");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "if (Object.is(value, -0)) {");
AppendLine(builder, 2, "return '0';");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return value.toString();");
AppendLine(builder, 0, "}");
builder.AppendLine();
@@ -932,13 +1087,40 @@ internal static class GoEmitter
AppendLine(builder, 0, "package attesttypes");
builder.AppendLine();
var patternRegistry = PatternRegistry.Build(registry);
var imports = new List<string>
{
"bytes",
"encoding/json",
"errors",
"fmt",
"sort"
};
if (patternRegistry.HasPatterns)
{
imports.Add("regexp");
}
AppendLine(builder, 0, "import (");
AppendLine(builder, 1, "\"encoding/json\"");
AppendLine(builder, 1, "\"errors\"");
AppendLine(builder, 1, "\"fmt\"");
foreach (var importName in imports)
{
AppendLine(builder, 1, $"\"{importName}\"");
}
AppendLine(builder, 0, ")");
builder.AppendLine();
if (patternRegistry.HasPatterns)
{
AppendLine(builder, 0, "var (");
foreach (var pattern in patternRegistry.Patterns)
{
AppendLine(builder, 1, $"{pattern.Value} = regexp.MustCompile(\"{EscapeGoString(pattern.Key)}\")");
}
AppendLine(builder, 0, ")");
builder.AppendLine();
}
foreach (var enumSpec in registry.Enums.Values.OrderBy(e => e.Name, StringComparer.Ordinal))
{
EmitEnum(builder, enumSpec);
@@ -959,7 +1141,9 @@ internal static class GoEmitter
{
EmitStruct(builder, obj);
builder.AppendLine();
EmitValidateMethod(builder, obj);
EmitUnmarshalMethod(builder, obj);
builder.AppendLine();
EmitValidateMethod(builder, obj, patternRegistry);
builder.AppendLine();
}
@@ -969,9 +1153,50 @@ internal static class GoEmitter
builder.AppendLine();
}
EmitCanonicalHelpers(builder);
return builder.ToString();
}
// Collects the distinct regex patterns declared on string-typed properties and
// assigns each a stable Go variable name (pattern0, pattern1, ...) in ordinal
// pattern order, so emitted code is deterministic across runs.
private sealed record PatternRegistry(IReadOnlyDictionary<string, string> Patterns)
{
    public bool HasPatterns => Patterns.Count > 0;

    public static PatternRegistry Build(TypeRegistry registry)
    {
        // SortedSet with an ordinal comparer deduplicates and orders in one pass.
        var distinctPatterns = new SortedSet<string>(StringComparer.Ordinal);
        foreach (var obj in registry.Objects.Values)
        {
            foreach (var property in obj.Properties)
            {
                if (property.Type is PrimitiveShape { Kind: PrimitiveKind.String } primitive
                    && !string.IsNullOrWhiteSpace(primitive.Pattern))
                {
                    distinctPatterns.Add(primitive.Pattern!);
                }
            }
        }

        var map = new Dictionary<string, string>(StringComparer.Ordinal);
        var index = 0;
        foreach (var pattern in distinctPatterns)
        {
            map[pattern] = $"pattern{index}";
            index++;
        }

        return new PatternRegistry(map);
    }

    // Looks up the Go variable name previously assigned to a pattern.
    public string GetPatternName(string pattern)
        => Patterns.TryGetValue(pattern, out var name)
            ? name
            : throw new InvalidOperationException($"Pattern not registered: {pattern}");
}
// Escapes a value for embedding in a double-quoted Go string literal:
// backslashes first, then quotes (order matters so escaped quotes are not
// double-escaped). Control characters and newlines are NOT escaped —
// presumably patterns are printable ASCII; confirm before widening inputs.
private static string EscapeGoString(string value)
    => value.Replace("\\", "\\\\").Replace("\"", "\\\"");
private static void EmitEnum(StringBuilder builder, EnumSpec enumSpec)
{
AppendLine(builder, 0, $"type {enumSpec.Name} string");
@@ -1005,7 +1230,22 @@ internal static class GoEmitter
AppendLine(builder, 0, "}");
}
private static void EmitValidateMethod(StringBuilder builder, ObjectSpec obj)
// Emits a Go UnmarshalJSON method for the given object type. The generated
// method decodes through a local alias type — same fields, none of the
// methods — so the custom UnmarshalJSON is not invoked recursively, and
// enables DisallowUnknownFields so payloads with unexpected properties fail.
private static void EmitUnmarshalMethod(StringBuilder builder, ObjectSpec obj)
{
    AppendLine(builder, 0, $"func (value *{obj.Name}) UnmarshalJSON(data []byte) error {{");
    AppendLine(builder, 1, $"type Alias {obj.Name}");
    AppendLine(builder, 1, "dec := json.NewDecoder(bytes.NewReader(data))");
    AppendLine(builder, 1, "dec.DisallowUnknownFields()");
    AppendLine(builder, 1, "var aux Alias");
    AppendLine(builder, 1, "if err := dec.Decode(&aux); err != nil {");
    AppendLine(builder, 2, $"return fmt.Errorf(\"failed to decode {obj.Name}: %w\", err)");
    AppendLine(builder, 1, "}");
    // Copy the decoded alias back into the receiver via a plain conversion.
    AppendLine(builder, 1, $"*value = {obj.Name}(aux)");
    AppendLine(builder, 1, "return nil");
    AppendLine(builder, 0, "}");
}
private static void EmitValidateMethod(StringBuilder builder, ObjectSpec obj, PatternRegistry patternRegistry)
{
AppendLine(builder, 0, $"func (value *{obj.Name}) Validate() error {{");
AppendLine(builder, 1, "if value == nil {");
@@ -1014,7 +1254,7 @@ internal static class GoEmitter
foreach (var property in obj.Properties)
{
EmitPropertyValidation(builder, property, $"value.{ToExported(property.Name)}", $"{obj.Name}.{ToExported(property.Name)}", 1);
EmitPropertyValidation(builder, property, $"value.{ToExported(property.Name)}", $"{obj.Name}.{ToExported(property.Name)}", patternRegistry, 1);
}
AppendLine(builder, 1, "return nil");
@@ -1027,20 +1267,100 @@ internal static class GoEmitter
AppendLine(builder, 1, "if err := value.Validate(); err != nil {");
AppendLine(builder, 2, "return nil, err");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "buf, err := json.Marshal(value)");
AppendLine(builder, 1, "raw, err := json.Marshal(value)");
AppendLine(builder, 1, "if err != nil {");
AppendLine(builder, 2, $"return nil, fmt.Errorf(\"failed to marshal {typeName}: %w\", err)");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return buf, nil");
AppendLine(builder, 1, "dec := json.NewDecoder(bytes.NewReader(raw))");
AppendLine(builder, 1, "dec.UseNumber()");
AppendLine(builder, 1, "var decoded any");
AppendLine(builder, 1, "if err := dec.Decode(&decoded); err != nil {");
AppendLine(builder, 2, $"return nil, fmt.Errorf(\"failed to parse {typeName}: %w\", err)");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return canonicalizeJSON(decoded)");
AppendLine(builder, 0, "}");
}
private static void EmitPropertyValidation(StringBuilder builder, PropertySpec property, string accessor, string path, int indent)
/// <summary>
/// Emits the two shared Go helpers used by every generated CanonicalJSON
/// method: canonicalizeJSON (entry point) and writeCanonicalValue (recursive
/// writer that sorts object keys and normalizes "-0" to "0"). The emitted Go
/// text must stay in sync with the canonical helpers shipped in the generated
/// package.
/// </summary>
private static void EmitCanonicalHelpers(StringBuilder builder)
{
// Entry point: buffers the recursive canonical encoding of a decoded value.
AppendLine(builder, 0, "func canonicalizeJSON(value any) ([]byte, error) {");
AppendLine(builder, 1, "var buf bytes.Buffer");
AppendLine(builder, 1, "if err := writeCanonicalValue(&buf, value); err != nil {");
AppendLine(builder, 2, "return nil, err");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return buf.Bytes(), nil");
AppendLine(builder, 0, "}");
builder.AppendLine();
// Recursive writer: type-switches over the shapes produced by json.Decoder
// with UseNumber() (nil, bool, string, json.Number, []any, map[string]any).
AppendLine(builder, 0, "func writeCanonicalValue(buf *bytes.Buffer, value any) error {");
AppendLine(builder, 1, "switch v := value.(type) {");
AppendLine(builder, 1, "case nil:");
AppendLine(builder, 2, "buf.WriteString(\"null\")");
AppendLine(builder, 1, "case bool:");
AppendLine(builder, 2, "if v {");
AppendLine(builder, 3, "buf.WriteString(\"true\")");
AppendLine(builder, 2, "} else {");
AppendLine(builder, 3, "buf.WriteString(\"false\")");
AppendLine(builder, 2, "}");
// Strings delegate to encoding/json for correct escaping.
AppendLine(builder, 1, "case string:");
AppendLine(builder, 2, "encoded, err := json.Marshal(v)");
AppendLine(builder, 2, "if err != nil {");
AppendLine(builder, 3, "return err");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "buf.Write(encoded)");
// Numbers keep their literal digits except negative zero.
AppendLine(builder, 1, "case json.Number:");
AppendLine(builder, 2, "text := v.String()");
AppendLine(builder, 2, "if text == \"-0\" {");
AppendLine(builder, 3, "text = \"0\"");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "buf.WriteString(text)");
AppendLine(builder, 1, "case []any:");
AppendLine(builder, 2, "buf.WriteByte('[')");
AppendLine(builder, 2, "for i, item := range v {");
AppendLine(builder, 3, "if i > 0 {");
AppendLine(builder, 4, "buf.WriteByte(',')");
AppendLine(builder, 3, "}");
AppendLine(builder, 3, "if err := writeCanonicalValue(buf, item); err != nil {");
AppendLine(builder, 4, "return err");
AppendLine(builder, 3, "}");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "buf.WriteByte(']')");
// Objects emit keys in sorted order for deterministic output.
AppendLine(builder, 1, "case map[string]any:");
AppendLine(builder, 2, "keys := make([]string, 0, len(v))");
AppendLine(builder, 2, "for key := range v {");
AppendLine(builder, 3, "keys = append(keys, key)");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "sort.Strings(keys)");
AppendLine(builder, 2, "buf.WriteByte('{')");
AppendLine(builder, 2, "for i, key := range keys {");
AppendLine(builder, 3, "if i > 0 {");
AppendLine(builder, 4, "buf.WriteByte(',')");
AppendLine(builder, 3, "}");
AppendLine(builder, 3, "encoded, err := json.Marshal(key)");
AppendLine(builder, 3, "if err != nil {");
AppendLine(builder, 4, "return err");
AppendLine(builder, 3, "}");
AppendLine(builder, 3, "buf.Write(encoded)");
AppendLine(builder, 3, "buf.WriteByte(':')");
AppendLine(builder, 3, "if err := writeCanonicalValue(buf, v[key]); err != nil {");
AppendLine(builder, 4, "return err");
AppendLine(builder, 3, "}");
AppendLine(builder, 2, "}");
AppendLine(builder, 2, "buf.WriteByte('}')");
AppendLine(builder, 1, "default:");
AppendLine(builder, 2, "return fmt.Errorf(\"unsupported canonical type %T\", value)");
AppendLine(builder, 1, "}");
AppendLine(builder, 1, "return nil");
AppendLine(builder, 0, "}");
builder.AppendLine();
}
private static void EmitPropertyValidation(StringBuilder builder, PropertySpec property, string accessor, string path, PatternRegistry patternRegistry, int indent)
{
switch (property.Type)
{
case PrimitiveShape primitive:
EmitPrimitiveValidation(builder, primitive, accessor, path, property.Required, indent);
EmitPrimitiveValidation(builder, primitive, accessor, path, property.Required, patternRegistry, indent);
break;
case EnumShape enumShape:
EmitEnumValidation(builder, enumShape, accessor, path, property.Required, indent);
@@ -1057,10 +1377,10 @@ internal static class GoEmitter
}
}
private static void EmitPrimitiveValidation(StringBuilder builder, PrimitiveShape primitive, string accessor, string path, bool required, int indent)
private static void EmitPrimitiveValidation(StringBuilder builder, PrimitiveShape primitive, string accessor, string path, bool required, PatternRegistry patternRegistry, int indent)
{
var pointer = UsesPointer(primitive, required);
if (!TryBuildPrimitiveChecks(primitive, pointer ? $"*{accessor}" : accessor, path, out var lines))
if (!TryBuildPrimitiveChecks(primitive, pointer ? $"*{accessor}" : accessor, path, patternRegistry, out var lines))
{
return;
}
@@ -1083,7 +1403,7 @@ internal static class GoEmitter
}
}
private static bool TryBuildPrimitiveChecks(PrimitiveShape primitive, string target, string path, out List<string> lines)
private static bool TryBuildPrimitiveChecks(PrimitiveShape primitive, string target, string path, PatternRegistry patternRegistry, out List<string> lines)
{
lines = new List<string>();
@@ -1108,7 +1428,14 @@ internal static class GoEmitter
lines.Add("}");
}
// No pattern validation for now.
if (primitive.Kind == PrimitiveKind.String && !string.IsNullOrWhiteSpace(primitive.Pattern))
{
var patternName = patternRegistry.GetPatternName(primitive.Pattern!);
lines.Add($"if !{patternName}.MatchString({target}) {{");
lines.Add($"\treturn fmt.Errorf(\"{path} must match {primitive.Pattern}\")");
lines.Add("}");
}
if (lines.Count == 0)
{
return false;

View File

@@ -4,6 +4,6 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0069-M | DONE | Maintainability audit for StellaOps.Attestor.Types.Generator. |
| AUDIT-0069-T | DONE | Test coverage audit for StellaOps.Attestor.Types.Generator. |
| AUDIT-0069-A | TODO | Pending approval for changes. |
| AUDIT-0069-A | DONE | Applied repo-root override, schema id fix, canonicalization, strict validation, prune, and tests. |

View File

@@ -2,9 +2,17 @@
package attesttypes
import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"regexp"
	"sort"
)
var (
// pattern0 matches a bare 64-character hex digest (a raw SHA-256 value).
pattern0 = regexp.MustCompile("^[A-Fa-f0-9]{64}$")
// pattern1 matches a "sha256:"-prefixed 64-character hex digest reference.
pattern1 = regexp.MustCompile("^sha256:[A-Fa-f0-9]{64}$")
)
type FindingStatus string
@@ -191,6 +199,18 @@ type BuildMetadata struct {
BuildInvocationId *string `json:"buildInvocationId,omitempty"`
}
// UnmarshalJSON strictly decodes BuildMetadata, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *BuildMetadata) UnmarshalJSON(data []byte) error {
type Alias BuildMetadata
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode BuildMetadata: %w", err)
}
*value = BuildMetadata(aux)
return nil
}
func (value *BuildMetadata) Validate() error {
if value == nil {
return errors.New("BuildMetadata is nil")
@@ -207,6 +227,18 @@ type BuildProvenance struct {
Environment *EnvironmentMetadata `json:"environment,omitempty"`
}
// UnmarshalJSON strictly decodes BuildProvenance, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *BuildProvenance) UnmarshalJSON(data []byte) error {
type Alias BuildProvenance
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode BuildProvenance: %w", err)
}
*value = BuildProvenance(aux)
return nil
}
func (value *BuildProvenance) Validate() error {
if value == nil {
return errors.New("BuildProvenance is nil")
@@ -242,6 +274,18 @@ type BuilderIdentity struct {
Platform *string `json:"platform,omitempty"`
}
// UnmarshalJSON strictly decodes BuilderIdentity, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *BuilderIdentity) UnmarshalJSON(data []byte) error {
type Alias BuilderIdentity
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode BuilderIdentity: %w", err)
}
*value = BuilderIdentity(aux)
return nil
}
func (value *BuilderIdentity) Validate() error {
if value == nil {
return errors.New("BuilderIdentity is nil")
@@ -257,6 +301,18 @@ type CustomEvidence struct {
Properties []CustomProperty `json:"properties,omitempty"`
}
// UnmarshalJSON strictly decodes CustomEvidence, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *CustomEvidence) UnmarshalJSON(data []byte) error {
type Alias CustomEvidence
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode CustomEvidence: %w", err)
}
*value = CustomEvidence(aux)
return nil
}
func (value *CustomEvidence) Validate() error {
if value == nil {
return errors.New("CustomEvidence is nil")
@@ -264,6 +320,9 @@ func (value *CustomEvidence) Validate() error {
if value.SchemaVersion != "StellaOps.CustomEvidence@1" {
return fmt.Errorf("CustomEvidence.SchemaVersion must equal StellaOps.CustomEvidence@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("CustomEvidence.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
for i := range value.Properties {
if err := value.Properties[i].Validate(); err != nil {
return fmt.Errorf("invalid CustomEvidence.Properties[%d]: %w", i, err)
@@ -277,6 +336,18 @@ type CustomProperty struct {
Value string `json:"value"`
}
// UnmarshalJSON strictly decodes CustomProperty, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *CustomProperty) UnmarshalJSON(data []byte) error {
type Alias CustomProperty
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode CustomProperty: %w", err)
}
*value = CustomProperty(aux)
return nil
}
func (value *CustomProperty) Validate() error {
if value == nil {
return errors.New("CustomProperty is nil")
@@ -290,6 +361,18 @@ type DiffHunk struct {
Content *string `json:"content,omitempty"`
}
// UnmarshalJSON strictly decodes DiffHunk, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *DiffHunk) UnmarshalJSON(data []byte) error {
type Alias DiffHunk
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode DiffHunk: %w", err)
}
*value = DiffHunk(aux)
return nil
}
func (value *DiffHunk) Validate() error {
if value == nil {
return errors.New("DiffHunk is nil")
@@ -312,6 +395,18 @@ type DiffPayload struct {
PackagesRemoved []PackageRef `json:"packagesRemoved,omitempty"`
}
// UnmarshalJSON strictly decodes DiffPayload, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *DiffPayload) UnmarshalJSON(data []byte) error {
type Alias DiffPayload
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode DiffPayload: %w", err)
}
*value = DiffPayload(aux)
return nil
}
func (value *DiffPayload) Validate() error {
if value == nil {
return errors.New("DiffPayload is nil")
@@ -344,10 +439,25 @@ type DigestReference struct {
Value string `json:"value"`
}
// UnmarshalJSON strictly decodes DigestReference, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *DigestReference) UnmarshalJSON(data []byte) error {
type Alias DigestReference
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode DigestReference: %w", err)
}
*value = DigestReference(aux)
return nil
}
// Validate reports an error when the receiver is nil or Value is not a bare
// 64-character hex digest (pattern0).
func (value *DigestReference) Validate() error {
if value == nil {
return errors.New("DigestReference is nil")
}
if !pattern0.MatchString(value.Value) {
return fmt.Errorf("DigestReference.Value must match ^[A-Fa-f0-9]{64}$")
}
return nil
}
@@ -356,6 +466,18 @@ type EnvironmentMetadata struct {
ImageDigest *DigestReference `json:"imageDigest,omitempty"`
}
// UnmarshalJSON strictly decodes EnvironmentMetadata, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *EnvironmentMetadata) UnmarshalJSON(data []byte) error {
type Alias EnvironmentMetadata
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode EnvironmentMetadata: %w", err)
}
*value = EnvironmentMetadata(aux)
return nil
}
func (value *EnvironmentMetadata) Validate() error {
if value == nil {
return errors.New("EnvironmentMetadata is nil")
@@ -375,6 +497,18 @@ type FileChange struct {
ToHash *string `json:"toHash,omitempty"`
}
// UnmarshalJSON strictly decodes FileChange, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *FileChange) UnmarshalJSON(data []byte) error {
type Alias FileChange
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode FileChange: %w", err)
}
*value = FileChange(aux)
return nil
}
func (value *FileChange) Validate() error {
if value == nil {
return errors.New("FileChange is nil")
@@ -393,6 +527,18 @@ type FindingKey struct {
CveId string `json:"cveId"`
}
// UnmarshalJSON strictly decodes FindingKey, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *FindingKey) UnmarshalJSON(data []byte) error {
type Alias FindingKey
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode FindingKey: %w", err)
}
*value = FindingKey(aux)
return nil
}
func (value *FindingKey) Validate() error {
if value == nil {
return errors.New("FindingKey is nil")
@@ -406,10 +552,25 @@ type ImageReference struct {
Tag *string `json:"tag,omitempty"`
}
// UnmarshalJSON strictly decodes ImageReference, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *ImageReference) UnmarshalJSON(data []byte) error {
type Alias ImageReference
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode ImageReference: %w", err)
}
*value = ImageReference(aux)
return nil
}
// Validate reports an error when the receiver is nil or Digest is not a
// "sha256:"-prefixed 64-character hex digest (pattern1).
func (value *ImageReference) Validate() error {
if value == nil {
return errors.New("ImageReference is nil")
}
if !pattern1.MatchString(value.Digest) {
return fmt.Errorf("ImageReference.Digest must match ^sha256:[A-Fa-f0-9]{64}$")
}
return nil
}
@@ -418,6 +579,18 @@ type LicenseDelta struct {
Removed []string `json:"removed,omitempty"`
}
// UnmarshalJSON strictly decodes LicenseDelta, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *LicenseDelta) UnmarshalJSON(data []byte) error {
type Alias LicenseDelta
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode LicenseDelta: %w", err)
}
*value = LicenseDelta(aux)
return nil
}
func (value *LicenseDelta) Validate() error {
if value == nil {
return errors.New("LicenseDelta is nil")
@@ -434,6 +607,18 @@ type MaterialChange struct {
PriorityScore *float64 `json:"priorityScore,omitempty"`
}
// UnmarshalJSON strictly decodes MaterialChange, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *MaterialChange) UnmarshalJSON(data []byte) error {
type Alias MaterialChange
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode MaterialChange: %w", err)
}
*value = MaterialChange(aux)
return nil
}
func (value *MaterialChange) Validate() error {
if value == nil {
return errors.New("MaterialChange is nil")
@@ -468,6 +653,18 @@ type MaterialReference struct {
Note *string `json:"note,omitempty"`
}
// UnmarshalJSON strictly decodes MaterialReference, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *MaterialReference) UnmarshalJSON(data []byte) error {
type Alias MaterialReference
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode MaterialReference: %w", err)
}
*value = MaterialReference(aux)
return nil
}
func (value *MaterialReference) Validate() error {
if value == nil {
return errors.New("MaterialReference is nil")
@@ -491,6 +688,18 @@ type PackageChange struct {
LicenseDelta *LicenseDelta `json:"licenseDelta,omitempty"`
}
// UnmarshalJSON strictly decodes PackageChange, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *PackageChange) UnmarshalJSON(data []byte) error {
type Alias PackageChange
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode PackageChange: %w", err)
}
*value = PackageChange(aux)
return nil
}
func (value *PackageChange) Validate() error {
if value == nil {
return errors.New("PackageChange is nil")
@@ -509,6 +718,18 @@ type PackageRef struct {
Purl *string `json:"purl,omitempty"`
}
// UnmarshalJSON strictly decodes PackageRef, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *PackageRef) UnmarshalJSON(data []byte) error {
type Alias PackageRef
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode PackageRef: %w", err)
}
*value = PackageRef(aux)
return nil
}
func (value *PackageRef) Validate() error {
if value == nil {
return errors.New("PackageRef is nil")
@@ -524,6 +745,18 @@ type PolicyDecision struct {
Remediation *string `json:"remediation,omitempty"`
}
// UnmarshalJSON strictly decodes PolicyDecision, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *PolicyDecision) UnmarshalJSON(data []byte) error {
type Alias PolicyDecision
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode PolicyDecision: %w", err)
}
*value = PolicyDecision(aux)
return nil
}
func (value *PolicyDecision) Validate() error {
if value == nil {
return errors.New("PolicyDecision is nil")
@@ -543,6 +776,18 @@ type PolicyEvaluation struct {
Decisions []PolicyDecision `json:"decisions"`
}
// UnmarshalJSON strictly decodes PolicyEvaluation, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *PolicyEvaluation) UnmarshalJSON(data []byte) error {
type Alias PolicyEvaluation
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode PolicyEvaluation: %w", err)
}
*value = PolicyEvaluation(aux)
return nil
}
func (value *PolicyEvaluation) Validate() error {
if value == nil {
return errors.New("PolicyEvaluation is nil")
@@ -550,6 +795,9 @@ func (value *PolicyEvaluation) Validate() error {
if value.SchemaVersion != "StellaOps.PolicyEvaluation@1" {
return fmt.Errorf("PolicyEvaluation.SchemaVersion must equal StellaOps.PolicyEvaluation@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("PolicyEvaluation.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
if err := value.Outcome.Validate(); err != nil {
return fmt.Errorf("invalid PolicyEvaluation.Outcome: %w", err)
}
@@ -569,6 +817,18 @@ type ReachabilityGate struct {
Rationale *string `json:"rationale,omitempty"`
}
// UnmarshalJSON strictly decodes ReachabilityGate, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *ReachabilityGate) UnmarshalJSON(data []byte) error {
type Alias ReachabilityGate
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode ReachabilityGate: %w", err)
}
*value = ReachabilityGate(aux)
return nil
}
func (value *ReachabilityGate) Validate() error {
if value == nil {
return errors.New("ReachabilityGate is nil")
@@ -588,6 +848,18 @@ type RiskFactor struct {
Description *string `json:"description,omitempty"`
}
// UnmarshalJSON strictly decodes RiskFactor, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *RiskFactor) UnmarshalJSON(data []byte) error {
type Alias RiskFactor
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode RiskFactor: %w", err)
}
*value = RiskFactor(aux)
return nil
}
func (value *RiskFactor) Validate() error {
if value == nil {
return errors.New("RiskFactor is nil")
@@ -610,6 +882,18 @@ type RiskProfileEvidence struct {
Factors []RiskFactor `json:"factors"`
}
// UnmarshalJSON strictly decodes RiskProfileEvidence, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *RiskProfileEvidence) UnmarshalJSON(data []byte) error {
type Alias RiskProfileEvidence
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode RiskProfileEvidence: %w", err)
}
*value = RiskProfileEvidence(aux)
return nil
}
func (value *RiskProfileEvidence) Validate() error {
if value == nil {
return errors.New("RiskProfileEvidence is nil")
@@ -617,6 +901,9 @@ func (value *RiskProfileEvidence) Validate() error {
if value.SchemaVersion != "StellaOps.RiskProfileEvidence@1" {
return fmt.Errorf("RiskProfileEvidence.SchemaVersion must equal StellaOps.RiskProfileEvidence@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("RiskProfileEvidence.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
if value.RiskScore < 0 {
return fmt.Errorf("RiskProfileEvidence.RiskScore must be >= 0")
}
@@ -643,6 +930,18 @@ type RiskState struct {
PolicyFlags []string `json:"policyFlags,omitempty"`
}
// UnmarshalJSON strictly decodes RiskState, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *RiskState) UnmarshalJSON(data []byte) error {
type Alias RiskState
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode RiskState: %w", err)
}
*value = RiskState(aux)
return nil
}
func (value *RiskState) Validate() error {
if value == nil {
return errors.New("RiskState is nil")
@@ -667,6 +966,18 @@ type RuntimeContext struct {
User *UserContext `json:"user,omitempty"`
}
// UnmarshalJSON strictly decodes RuntimeContext, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *RuntimeContext) UnmarshalJSON(data []byte) error {
type Alias RuntimeContext
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode RuntimeContext: %w", err)
}
*value = RuntimeContext(aux)
return nil
}
func (value *RuntimeContext) Validate() error {
if value == nil {
return errors.New("RuntimeContext is nil")
@@ -689,6 +1000,18 @@ type SbomAttestation struct {
Packages []SbomPackage `json:"packages,omitempty"`
}
// UnmarshalJSON strictly decodes SbomAttestation, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *SbomAttestation) UnmarshalJSON(data []byte) error {
type Alias SbomAttestation
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode SbomAttestation: %w", err)
}
*value = SbomAttestation(aux)
return nil
}
func (value *SbomAttestation) Validate() error {
if value == nil {
return errors.New("SbomAttestation is nil")
@@ -696,6 +1019,9 @@ func (value *SbomAttestation) Validate() error {
if value.SchemaVersion != "StellaOps.SBOMAttestation@1" {
return fmt.Errorf("SbomAttestation.SchemaVersion must equal StellaOps.SBOMAttestation@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("SbomAttestation.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
if err := value.SbomFormat.Validate(); err != nil {
return fmt.Errorf("invalid SbomAttestation.SbomFormat: %w", err)
}
@@ -719,6 +1045,18 @@ type SbomPackage struct {
Licenses []string `json:"licenses,omitempty"`
}
// UnmarshalJSON strictly decodes SbomPackage, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *SbomPackage) UnmarshalJSON(data []byte) error {
type Alias SbomPackage
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode SbomPackage: %w", err)
}
*value = SbomPackage(aux)
return nil
}
func (value *SbomPackage) Validate() error {
if value == nil {
return errors.New("SbomPackage is nil")
@@ -740,6 +1078,18 @@ type ScanFinding struct {
References []string `json:"references,omitempty"`
}
// UnmarshalJSON strictly decodes ScanFinding, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *ScanFinding) UnmarshalJSON(data []byte) error {
type Alias ScanFinding
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode ScanFinding: %w", err)
}
*value = ScanFinding(aux)
return nil
}
func (value *ScanFinding) Validate() error {
if value == nil {
return errors.New("ScanFinding is nil")
@@ -773,6 +1123,18 @@ type ScanResults struct {
Findings []ScanFinding `json:"findings"`
}
// UnmarshalJSON strictly decodes ScanResults, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *ScanResults) UnmarshalJSON(data []byte) error {
type Alias ScanResults
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode ScanResults: %w", err)
}
*value = ScanResults(aux)
return nil
}
func (value *ScanResults) Validate() error {
if value == nil {
return errors.New("ScanResults is nil")
@@ -780,6 +1142,9 @@ func (value *ScanResults) Validate() error {
if value.SchemaVersion != "StellaOps.ScanResults@1" {
return fmt.Errorf("ScanResults.SchemaVersion must equal StellaOps.ScanResults@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("ScanResults.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
for i := range value.Findings {
if err := value.Findings[i].Validate(); err != nil {
return fmt.Errorf("invalid ScanResults.Findings[%d]: %w", i, err)
@@ -794,6 +1159,18 @@ type ScannerInfo struct {
Ruleset *string `json:"ruleset,omitempty"`
}
// UnmarshalJSON strictly decodes ScannerInfo, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *ScannerInfo) UnmarshalJSON(data []byte) error {
type Alias ScannerInfo
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode ScannerInfo: %w", err)
}
*value = ScannerInfo(aux)
return nil
}
func (value *ScannerInfo) Validate() error {
if value == nil {
return errors.New("ScannerInfo is nil")
@@ -813,6 +1190,18 @@ type SmartDiffPredicate struct {
MaterialChanges []MaterialChange `json:"materialChanges,omitempty"`
}
// UnmarshalJSON strictly decodes SmartDiffPredicate, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *SmartDiffPredicate) UnmarshalJSON(data []byte) error {
type Alias SmartDiffPredicate
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode SmartDiffPredicate: %w", err)
}
*value = SmartDiffPredicate(aux)
return nil
}
func (value *SmartDiffPredicate) Validate() error {
if value == nil {
return errors.New("SmartDiffPredicate is nil")
@@ -859,6 +1248,18 @@ type UserContext struct {
Caps []string `json:"caps,omitempty"`
}
// UnmarshalJSON strictly decodes UserContext, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *UserContext) UnmarshalJSON(data []byte) error {
type Alias UserContext
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode UserContext: %w", err)
}
*value = UserContext(aux)
return nil
}
func (value *UserContext) Validate() error {
if value == nil {
return errors.New("UserContext is nil")
@@ -883,6 +1284,18 @@ type VexAttestation struct {
Statements []VexStatement `json:"statements"`
}
// UnmarshalJSON strictly decodes VexAttestation, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *VexAttestation) UnmarshalJSON(data []byte) error {
type Alias VexAttestation
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode VexAttestation: %w", err)
}
*value = VexAttestation(aux)
return nil
}
func (value *VexAttestation) Validate() error {
if value == nil {
return errors.New("VexAttestation is nil")
@@ -890,6 +1303,9 @@ func (value *VexAttestation) Validate() error {
if value.SchemaVersion != "StellaOps.VEXAttestation@1" {
return fmt.Errorf("VexAttestation.SchemaVersion must equal StellaOps.VEXAttestation@1")
}
if !pattern1.MatchString(value.SubjectDigest) {
return fmt.Errorf("VexAttestation.SubjectDigest must match ^sha256:[A-Fa-f0-9]{64}$")
}
if len(value.Statements) < 1 {
return fmt.Errorf("VexAttestation.Statements must contain at least 1 item(s)")
}
@@ -911,6 +1327,18 @@ type VexStatement struct {
References []string `json:"references,omitempty"`
}
// UnmarshalJSON strictly decodes VexStatement, rejecting unknown JSON fields; the local alias type avoids recursing into this method.
func (value *VexStatement) UnmarshalJSON(data []byte) error {
type Alias VexStatement
dec := json.NewDecoder(bytes.NewReader(data))
dec.DisallowUnknownFields()
var aux Alias
if err := dec.Decode(&aux); err != nil {
return fmt.Errorf("failed to decode VexStatement: %w", err)
}
*value = VexStatement(aux)
return nil
}
func (value *VexStatement) Validate() error {
if value == nil {
return errors.New("VexStatement is nil")
@@ -928,87 +1356,204 @@ func (value *BuildProvenance) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal BuildProvenance: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse BuildProvenance: %w", err)
}
return canonicalizeJSON(decoded)
}
func (value *CustomEvidence) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal CustomEvidence: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse CustomEvidence: %w", err)
}
return canonicalizeJSON(decoded)
}
func (value *PolicyEvaluation) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal PolicyEvaluation: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse PolicyEvaluation: %w", err)
}
return canonicalizeJSON(decoded)
}
func (value *RiskProfileEvidence) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal RiskProfileEvidence: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse RiskProfileEvidence: %w", err)
}
return canonicalizeJSON(decoded)
}
func (value *SbomAttestation) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal SbomAttestation: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse SbomAttestation: %w", err)
}
return canonicalizeJSON(decoded)
}
func (value *ScanResults) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
raw, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal ScanResults: %w", err)
}
return buf, nil
dec := json.NewDecoder(bytes.NewReader(raw))
dec.UseNumber()
var decoded any
if err := dec.Decode(&decoded); err != nil {
return nil, fmt.Errorf("failed to parse ScanResults: %w", err)
}
return canonicalizeJSON(decoded)
}
// CanonicalJSON validates the predicate and returns its deterministic
// canonical JSON encoding (marshal, re-decode with UseNumber to preserve
// numeric tokens, then re-serialize with sorted keys via canonicalizeJSON).
//
// Fix: removed leftover pre-refactor lines (`buf, err := json.Marshal` and
// `return buf, nil`) that shadowed the result and made the canonicalization
// path unreachable.
func (value *SmartDiffPredicate) CanonicalJSON() ([]byte, error) {
	if err := value.Validate(); err != nil {
		return nil, err
	}
	raw, err := json.Marshal(value)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal SmartDiffPredicate: %w", err)
	}
	dec := json.NewDecoder(bytes.NewReader(raw))
	dec.UseNumber()
	var decoded any
	if err := dec.Decode(&decoded); err != nil {
		return nil, fmt.Errorf("failed to parse SmartDiffPredicate: %w", err)
	}
	return canonicalizeJSON(decoded)
}
// CanonicalJSON validates the attestation and returns its deterministic
// canonical JSON encoding (marshal, re-decode with UseNumber to preserve
// numeric tokens, then re-serialize with sorted keys via canonicalizeJSON).
//
// Fix: removed leftover pre-refactor lines (`buf, err := json.Marshal` and
// `return buf, nil`) that shadowed the result and made the canonicalization
// path unreachable.
func (value *VexAttestation) CanonicalJSON() ([]byte, error) {
	if err := value.Validate(); err != nil {
		return nil, err
	}
	raw, err := json.Marshal(value)
	if err != nil {
		return nil, fmt.Errorf("failed to marshal VexAttestation: %w", err)
	}
	dec := json.NewDecoder(bytes.NewReader(raw))
	dec.UseNumber()
	var decoded any
	if err := dec.Decode(&decoded); err != nil {
		return nil, fmt.Errorf("failed to parse VexAttestation: %w", err)
	}
	return canonicalizeJSON(decoded)
}
// canonicalizeJSON renders an already-decoded JSON value (nil, bool, string,
// json.Number, []any, map[string]any) as deterministic canonical bytes:
// object keys sorted, no insignificant whitespace.
func canonicalizeJSON(value any) ([]byte, error) {
	out := &bytes.Buffer{}
	if writeErr := writeCanonicalValue(out, value); writeErr != nil {
		return nil, writeErr
	}
	return out.Bytes(), nil
}
func writeCanonicalValue(buf *bytes.Buffer, value any) error {
switch v := value.(type) {
case nil:
buf.WriteString("null")
case bool:
if v {
buf.WriteString("true")
} else {
buf.WriteString("false")
}
case string:
encoded, err := json.Marshal(v)
if err != nil {
return err
}
buf.Write(encoded)
case json.Number:
text := v.String()
if text == "-0" {
text = "0"
}
buf.WriteString(text)
case []any:
buf.WriteByte('[')
for i, item := range v {
if i > 0 {
buf.WriteByte(',')
}
if err := writeCanonicalValue(buf, item); err != nil {
return err
}
}
buf.WriteByte(']')
case map[string]any:
keys := make([]string, 0, len(v))
for key := range v {
keys = append(keys, key)
}
sort.Strings(keys)
buf.WriteByte('{')
for i, key := range keys {
if i > 0 {
buf.WriteByte(',')
}
encoded, err := json.Marshal(key)
if err != nil {
return err
}
buf.Write(encoded)
buf.WriteByte(':')
if err := writeCanonicalValue(buf, v[key]); err != nil {
return err
}
}
buf.WriteByte('}')
default:
return fmt.Errorf("unsupported canonical type %T", value)
}
return nil
}

View File

@@ -275,6 +275,72 @@ export interface VexStatement {
references?: Array<string>;
}
// Frozen allow-lists of the property names recognized for each schema type.
// Each list is kept alphabetically sorted; assertNoUnknownKeys compares a
// candidate object's keys against the matching list and rejects strays.
const BuildMetadataKeys = Object.freeze(['buildFinishedOn', 'buildInvocationId', 'buildStartedOn', 'reproducible'] as const);
const BuildProvenanceKeys = Object.freeze(['buildType', 'builder', 'environment', 'materials', 'metadata', 'schemaVersion'] as const);
const BuilderIdentityKeys = Object.freeze(['id', 'platform', 'version'] as const);
const CustomEvidenceKeys = Object.freeze(['generatedAt', 'kind', 'properties', 'schemaVersion', 'subjectDigest'] as const);
const CustomPropertyKeys = Object.freeze(['key', 'value'] as const);
const DiffHunkKeys = Object.freeze(['content', 'lineCount', 'startLine'] as const);
const DiffPayloadKeys = Object.freeze(['filesAdded', 'filesChanged', 'filesRemoved', 'packagesAdded', 'packagesChanged', 'packagesRemoved'] as const);
const DigestReferenceKeys = Object.freeze(['algorithm', 'value'] as const);
const EnvironmentMetadataKeys = Object.freeze(['imageDigest', 'platform'] as const);
const FileChangeKeys = Object.freeze(['fromHash', 'hunks', 'path', 'toHash'] as const);
const FindingKeyKeys = Object.freeze(['componentPurl', 'componentVersion', 'cveId'] as const);
const ImageReferenceKeys = Object.freeze(['digest', 'name', 'tag'] as const);
const LicenseDeltaKeys = Object.freeze(['added', 'removed'] as const);
const MaterialChangeKeys = Object.freeze(['changeType', 'currentState', 'findingKey', 'previousState', 'priorityScore', 'reason'] as const);
const MaterialReferenceKeys = Object.freeze(['digests', 'note', 'uri'] as const);
const PackageChangeKeys = Object.freeze(['from', 'licenseDelta', 'name', 'purl', 'to'] as const);
const PackageRefKeys = Object.freeze(['name', 'purl', 'version'] as const);
const PolicyDecisionKeys = Object.freeze(['effect', 'policyId', 'reason', 'remediation', 'ruleId'] as const);
const PolicyEvaluationKeys = Object.freeze(['decisions', 'evaluatedAt', 'outcome', 'policyVersion', 'schemaVersion', 'subjectDigest'] as const);
const ReachabilityGateKeys = Object.freeze(['class', 'configActivated', 'rationale', 'reachable', 'runningUser'] as const);
const RiskFactorKeys = Object.freeze(['description', 'name', 'weight'] as const);
const RiskProfileEvidenceKeys = Object.freeze(['factors', 'generatedAt', 'riskLevel', 'riskScore', 'schemaVersion', 'subjectDigest'] as const);
const RiskStateKeys = Object.freeze(['epssScore', 'inAffectedRange', 'kev', 'policyFlags', 'reachable', 'vexStatus'] as const);
const RuntimeContextKeys = Object.freeze(['entrypoint', 'env', 'user'] as const);
const SbomAttestationKeys = Object.freeze(['componentCount', 'packages', 'sbomDigest', 'sbomFormat', 'sbomUri', 'schemaVersion', 'subjectDigest'] as const);
const SbomPackageKeys = Object.freeze(['licenses', 'purl', 'version'] as const);
const ScanFindingKeys = Object.freeze(['cvssScore', 'description', 'id', 'packageName', 'packageVersion', 'references', 'severity', 'status'] as const);
const ScanResultsKeys = Object.freeze(['findings', 'generatedAt', 'scannerName', 'scannerVersion', 'schemaVersion', 'subjectDigest'] as const);
const ScannerInfoKeys = Object.freeze(['name', 'ruleset', 'version'] as const);
const SmartDiffPredicateKeys = Object.freeze(['baseImage', 'context', 'diff', 'materialChanges', 'reachabilityGate', 'scanner', 'schemaVersion', 'suppressedCount', 'targetImage'] as const);
const UserContextKeys = Object.freeze(['caps', 'gid', 'uid'] as const);
const VexAttestationKeys = Object.freeze(['generatedAt', 'schemaVersion', 'statements', 'subjectDigest'] as const);
const VexStatementKeys = Object.freeze(['actionStatement', 'impactStatement', 'justification', 'references', 'status', 'timestamp', 'vulnerabilityId'] as const);
/** Narrows a value to a plain object record (non-null, non-array object). */
function isRecord(value: unknown): value is Record<string, unknown> {
  if (value === null || Array.isArray(value)) {
    return false;
  }
  return typeof value === 'object';
}
@@ -283,10 +349,19 @@ function pathString(path: string[]): string {
return path.length === 0 ? 'value' : `value.${path.join('.')}`;
}
/**
 * Throws when `value` carries a property name outside the `allowed` list;
 * `path` locates the offending object in the error message.
 */
function assertNoUnknownKeys(value: Record<string, unknown>, allowed: readonly string[], path: string[]): void {
  const permitted = new Set<string>(allowed);
  for (const key of Object.keys(value)) {
    if (!permitted.has(key)) {
      throw new Error(`${pathString(path)} has unknown property '${key}'.`);
    }
  }
}
function assertBuildMetadata(value: unknown, path: string[]): asserts value is BuildMetadata {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, BuildMetadataKeys, path);
if (value.buildStartedOn === undefined) {
throw new Error(`${pathString([...path, 'buildStartedOn'])} is required.`);
}
@@ -315,6 +390,7 @@ function assertBuildProvenance(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, BuildProvenanceKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -359,6 +435,7 @@ function assertBuilderIdentity(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, BuilderIdentityKeys, path);
if (value.id === undefined) {
throw new Error(`${pathString([...path, 'id'])} is required.`);
}
@@ -381,6 +458,7 @@ function assertCustomEvidence(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, CustomEvidenceKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -425,6 +503,7 @@ function assertCustomProperty(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, CustomPropertyKeys, path);
if (value.key === undefined) {
throw new Error(`${pathString([...path, 'key'])} is required.`);
}
@@ -443,6 +522,7 @@ function assertDiffHunk(value: unknown, path: string[]): asserts value is DiffHu
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, DiffHunkKeys, path);
if (value.startLine === undefined) {
throw new Error(`${pathString([...path, 'startLine'])} is required.`);
}
@@ -472,6 +552,7 @@ function assertDiffPayload(value: unknown, path: string[]): asserts value is Dif
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, DiffPayloadKeys, path);
if (value.filesAdded !== undefined) {
if (!Array.isArray(value.filesAdded)) {
throw new Error(`${pathString([...path, 'filesAdded'])} must be an array.`);
@@ -530,6 +611,7 @@ function assertDigestReference(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, DigestReferenceKeys, path);
if (value.algorithm === undefined) {
throw new Error(`${pathString([...path, 'algorithm'])} is required.`);
}
@@ -551,6 +633,7 @@ function assertEnvironmentMetadata(value: unknown, path: string[]): asserts valu
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, EnvironmentMetadataKeys, path);
if (value.platform !== undefined) {
if (typeof value.platform !== 'string') {
throw new Error(`${pathString([...path, 'platform'])} must be a string.`);
@@ -565,6 +648,7 @@ function assertFileChange(value: unknown, path: string[]): asserts value is File
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, FileChangeKeys, path);
if (value.path === undefined) {
throw new Error(`${pathString([...path, 'path'])} is required.`);
}
@@ -595,6 +679,7 @@ function assertFindingKey(value: unknown, path: string[]): asserts value is Find
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, FindingKeyKeys, path);
if (value.componentPurl === undefined) {
throw new Error(`${pathString([...path, 'componentPurl'])} is required.`);
}
@@ -619,6 +704,7 @@ function assertImageReference(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, ImageReferenceKeys, path);
if (value.digest === undefined) {
throw new Error(`${pathString([...path, 'digest'])} is required.`);
}
@@ -644,6 +730,7 @@ function assertLicenseDelta(value: unknown, path: string[]): asserts value is Li
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, LicenseDeltaKeys, path);
if (value.added !== undefined) {
if (!Array.isArray(value.added)) {
throw new Error(`${pathString([...path, 'added'])} must be an array.`);
@@ -670,6 +757,7 @@ function assertMaterialChange(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, MaterialChangeKeys, path);
if (value.findingKey === undefined) {
throw new Error(`${pathString([...path, 'findingKey'])} is required.`);
}
@@ -706,6 +794,7 @@ function assertMaterialReference(value: unknown, path: string[]): asserts value
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, MaterialReferenceKeys, path);
if (value.uri === undefined) {
throw new Error(`${pathString([...path, 'uri'])} is required.`);
}
@@ -735,6 +824,7 @@ function assertPackageChange(value: unknown, path: string[]): asserts value is P
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, PackageChangeKeys, path);
if (value.name === undefined) {
throw new Error(`${pathString([...path, 'name'])} is required.`);
}
@@ -767,6 +857,7 @@ function assertPackageRef(value: unknown, path: string[]): asserts value is Pack
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, PackageRefKeys, path);
if (value.name === undefined) {
throw new Error(`${pathString([...path, 'name'])} is required.`);
}
@@ -790,6 +881,7 @@ function assertPolicyDecision(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, PolicyDecisionKeys, path);
if (value.policyId === undefined) {
throw new Error(`${pathString([...path, 'policyId'])} is required.`);
}
@@ -824,6 +916,7 @@ function assertPolicyEvaluation(value: unknown, path: string[]): asserts value i
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, PolicyEvaluationKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -875,6 +968,7 @@ function assertReachabilityGate(value: unknown, path: string[]): asserts value i
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, ReachabilityGateKeys, path);
if (value.reachable !== undefined) {
if (typeof value.reachable !== 'boolean') {
throw new Error(`${pathString([...path, 'reachable'])} must be a boolean.`);
@@ -913,6 +1007,7 @@ function assertRiskFactor(value: unknown, path: string[]): asserts value is Risk
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, RiskFactorKeys, path);
if (value.name === undefined) {
throw new Error(`${pathString([...path, 'name'])} is required.`);
}
@@ -942,6 +1037,7 @@ function assertRiskProfileEvidence(value: unknown, path: string[]): asserts valu
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, RiskProfileEvidenceKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -999,6 +1095,7 @@ function assertRiskState(value: unknown, path: string[]): asserts value is RiskS
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, RiskStateKeys, path);
if (value.reachable !== undefined) {
if (typeof value.reachable !== 'boolean') {
throw new Error(`${pathString([...path, 'reachable'])} must be a boolean.`);
@@ -1048,6 +1145,7 @@ function assertRuntimeContext(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, RuntimeContextKeys, path);
if (value.entrypoint !== undefined) {
if (!Array.isArray(value.entrypoint)) {
throw new Error(`${pathString([...path, 'entrypoint'])} must be an array.`);
@@ -1079,6 +1177,7 @@ function assertSbomAttestation(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, SbomAttestationKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -1135,6 +1234,7 @@ function assertSbomPackage(value: unknown, path: string[]): asserts value is Sbo
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, SbomPackageKeys, path);
if (value.purl === undefined) {
throw new Error(`${pathString([...path, 'purl'])} is required.`);
}
@@ -1165,6 +1265,7 @@ function assertScanFinding(value: unknown, path: string[]): asserts value is Sca
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, ScanFindingKeys, path);
if (value.id === undefined) {
throw new Error(`${pathString([...path, 'id'])} is required.`);
}
@@ -1229,6 +1330,7 @@ function assertScanResults(value: unknown, path: string[]): asserts value is Sca
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, ScanResultsKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -1280,6 +1382,7 @@ function assertScannerInfo(value: unknown, path: string[]): asserts value is Sca
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, ScannerInfoKeys, path);
if (value.name === undefined) {
throw new Error(`${pathString([...path, 'name'])} is required.`);
}
@@ -1303,6 +1406,7 @@ function assertSmartDiffPredicate(value: unknown, path: string[]): asserts value
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, SmartDiffPredicateKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -1357,6 +1461,7 @@ function assertUserContext(value: unknown, path: string[]): asserts value is Use
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, UserContextKeys, path);
if (value.uid !== undefined) {
if (typeof value.uid !== 'number') {
throw new Error(`${pathString([...path, 'uid'])} must be a number.`);
@@ -1389,6 +1494,7 @@ function assertVexAttestation(value: unknown, path: string[]): asserts value is
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, VexAttestationKeys, path);
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
@@ -1431,6 +1537,7 @@ function assertVexStatement(value: unknown, path: string[]): asserts value is Ve
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
assertNoUnknownKeys(value, VexStatementKeys, path);
if (value.vulnerabilityId === undefined) {
throw new Error(`${pathString([...path, 'vulnerabilityId'])} is required.`);
}
@@ -1560,21 +1667,51 @@ export function canonicalizeVexAttestation(value: VexAttestation): string {
}
/**
 * Serializes a value to canonical JSON text (sorted object keys, normalized
 * numbers) by delegating to canonicalizeValue.
 *
 * Fix: removed the stale pre-refactor line that still referenced the deleted
 * sortValue helper alongside the new return statement.
 */
function canonicalStringify(input: unknown): string {
  return canonicalizeValue(input);
}
function sortValue(value: unknown): unknown {
function canonicalizeValue(value: unknown): string {
if (value === null) {
return 'null';
}
if (typeof value === 'string') {
return JSON.stringify(value);
}
if (typeof value === 'number') {
return formatNumber(value);
}
if (typeof value === 'boolean') {
return value ? 'true' : 'false';
}
if (Array.isArray(value)) {
return value.map(sortValue);
return `[${value.map(canonicalizeValue).join(',')}]`;
}
if (isRecord(value)) {
const ordered: Record<string, unknown> = {};
const keys = Object.keys(value).sort();
for (const key of keys) {
ordered[key] = sortValue(value[key]);
}
return ordered;
return canonicalizeObject(value);
}
return value;
throw new Error('Unsupported value for canonical JSON.');
}
/**
 * Renders a plain object as canonical JSON: keys sorted lexicographically,
 * `undefined`-valued entries omitted, no insignificant whitespace.
 */
function canonicalizeObject(value: Record<string, unknown>): string {
  const rendered = Object.keys(value)
    .sort()
    .filter((key) => value[key] !== undefined)
    .map((key) => `${JSON.stringify(key)}:${canonicalizeValue(value[key])}`);
  return `{${rendered.join(',')}}`;
}
/**
 * Formats a number for canonical JSON output. Negative zero collapses to
 * "0"; NaN and infinities are rejected because JSON cannot represent them.
 */
function formatNumber(value: number): string {
  if (!Number.isFinite(value)) {
    throw new Error('Non-finite numbers are not allowed in canonical JSON.');
  }
  return Object.is(value, -0) ? '0' : String(value);
}

File diff suppressed because one or more lines are too long

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-build-provenance.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-build-provenance.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Build provenance evidence capturing builder inputs and outputs.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-custom-evidence.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-custom-evidence.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Generic evidence payload for bespoke attestations.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-policy-evaluation.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-policy-evaluation.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Policy evaluation outcome for an artifact.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-risk-profile.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-risk-profile.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Risk scoring evidence summarising exposure for an artifact.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-sbom-attestation.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-sbom-attestation.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "SBOM attestation linking an SBOM document to an artifact.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-scan-results.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-scan-results.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Scanner findings for an artifact at a point in time.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-smart-diff.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-smart-diff.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "Smart-Diff predicate describing differential analysis between two scans.",
"type": "object",
"additionalProperties": false,

View File

@@ -1,6 +1,7 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-vex-attestation.v1.json",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-vex-attestation.v1.schema.json",
"$comment": "Generated by StellaOps.Attestor.Types.Generator.",
"title": "VEX attestation describing vulnerability status for an artifact.",
"type": "object",
"additionalProperties": false,

File diff suppressed because one or more lines are too long

View File

@@ -1,119 +1 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestation/uncertainty-statement.v1.json",
"title": "Uncertainty Statement",
"description": "In-toto predicate type for uncertainty state attestations. Sprint: SPRINT_4300_0002_0002 (UATT-007).",
"type": "object",
"required": ["_type", "subject", "predicateType", "predicate"],
"properties": {
"_type": {
"type": "string",
"const": "https://in-toto.io/Statement/v1"
},
"subject": {
"type": "array",
"minItems": 1,
"items": {
"type": "object",
"required": ["digest"],
"properties": {
"name": {
"type": "string",
"description": "Subject identifier (e.g., SBOM file name or image reference)"
},
"digest": {
"type": "object",
"description": "Cryptographic digest of the subject",
"additionalProperties": {
"type": "string",
"pattern": "^[a-fA-F0-9]+$"
}
}
}
}
},
"predicateType": {
"type": "string",
"const": "uncertainty.stella/v1"
},
"predicate": {
"$ref": "#/$defs/UncertaintyPredicate"
}
},
"$defs": {
"UncertaintyPredicate": {
"type": "object",
"required": ["graphRevisionId", "aggregateTier", "meanEntropy", "unknownCount", "evaluatedAt"],
"properties": {
"graphRevisionId": {
"type": "string",
"description": "Unique identifier for the knowledge graph revision used in evaluation"
},
"aggregateTier": {
"type": "string",
"enum": ["T1", "T2", "T3", "T4"],
"description": "Aggregate uncertainty tier (T1 = highest uncertainty, T4 = lowest)"
},
"meanEntropy": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Mean entropy across all unknowns (0.0 = certain, 1.0 = maximum uncertainty)"
},
"unknownCount": {
"type": "integer",
"minimum": 0,
"description": "Total count of unknowns in this evaluation"
},
"markers": {
"type": "array",
"description": "Breakdown of unknowns by marker kind",
"items": {
"$ref": "#/$defs/UnknownMarker"
}
},
"evaluatedAt": {
"type": "string",
"format": "date-time",
"description": "ISO-8601 timestamp of uncertainty evaluation"
},
"policyRevisionId": {
"type": "string",
"description": "Optional policy revision ID if uncertainty was evaluated with policy"
},
"imageDigest": {
"type": "string",
"pattern": "^sha256:[a-fA-F0-9]{64}$",
"description": "Optional container image digest"
}
}
},
"UnknownMarker": {
"type": "object",
"required": ["kind", "count", "entropy"],
"properties": {
"kind": {
"type": "string",
"enum": ["U-RCH", "U-ID", "U-PROV", "U-VEX", "U-FEED", "U-CONFIG", "U-ANALYZER"],
"description": "Unknown marker kind code"
},
"count": {
"type": "integer",
"minimum": 0,
"description": "Count of unknowns with this marker"
},
"entropy": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Mean entropy for this marker kind"
},
"tier": {
"type": "string",
"enum": ["T1", "T2", "T3", "T4"],
"description": "Uncertainty tier for this marker kind"
}
}
}
}
}
{ "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://stella-ops.org/schemas/attestation/uncertainty-statement.v1.json", "title": "Uncertainty Statement", "description": "In-toto predicate type for uncertainty state attestations. Sprint: SPRINT_4300_0002_0002 (UATT-007).", "type": "object", "required": ["_type", "subject", "predicateType", "predicate"], "properties": { "_type": { "type": "string", "const": "https://in-toto.io/Statement/v1" }, "subject": { "type": "array", "minItems": 1, "items": { "type": "object", "required": ["digest"], "properties": { "name": { "type": "string", "description": "Subject identifier (e.g., SBOM file name or image reference)" }, "digest": { "type": "object", "description": "Cryptographic digest of the subject", "additionalProperties": { "type": "string", "pattern": "^[a-fA-F0-9]+$" } } } } }, "predicateType": { "type": "string", "const": "uncertainty.stella/v1" }, "predicate": { "$ref": "#/$defs/UncertaintyPredicate" } }, "$defs": { "UncertaintyPredicate": { "type": "object", "required": ["graphRevisionId", "aggregateTier", "meanEntropy", "unknownCount", "evaluatedAt"], "properties": { "graphRevisionId": { "type": "string", "description": "Unique identifier for the knowledge graph revision used in evaluation" }, "aggregateTier": { "type": "string", "enum": ["T1", "T2", "T3", "T4"], "description": "Aggregate uncertainty tier (T1 = highest uncertainty, T4 = lowest)" }, "meanEntropy": { "type": "number", "minimum": 0, "maximum": 1, "description": "Mean entropy across all unknowns (0.0 = certain, 1.0 = maximum uncertainty)" }, "unknownCount": { "type": "integer", "minimum": 0, "description": "Total count of unknowns in this evaluation" }, "markers": { "type": "array", "description": "Breakdown of unknowns by marker kind", "items": { "$ref": "#/$defs/UnknownMarker" } }, "evaluatedAt": { "type": "string", "format": "date-time", "description": "ISO-8601 timestamp of uncertainty evaluation" }, "policyRevisionId": { 
"type": "string", "description": "Optional policy revision ID if uncertainty was evaluated with policy" }, "imageDigest": { "type": "string", "pattern": "^sha256:[a-fA-F0-9]{64}$", "description": "Optional container image digest" } } }, "UnknownMarker": { "type": "object", "required": ["kind", "count", "entropy"], "properties": { "kind": { "type": "string", "enum": ["U-RCH", "U-ID", "U-PROV", "U-VEX", "U-FEED", "U-CONFIG", "U-ANALYZER"], "description": "Unknown marker kind code" }, "count": { "type": "integer", "minimum": 0, "description": "Count of unknowns with this marker" }, "entropy": { "type": "number", "minimum": 0, "maximum": 1, "description": "Mean entropy for this marker kind" }, "tier": { "type": "string", "enum": ["T1", "T2", "T3", "T4"], "description": "Uncertainty tier for this marker kind" } } } } }

View File

@@ -1,151 +1 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stellaops.io/schemas/verification-policy.v1.json",
"title": "VerificationPolicy",
"description": "Attestation verification policy configuration for StellaOps",
"type": "object",
"required": ["policyId", "version", "predicateTypes", "signerRequirements"],
"properties": {
"policyId": {
"type": "string",
"description": "Unique policy identifier",
"pattern": "^[a-z0-9-]+$",
"examples": ["default-verification-policy", "strict-slsa-policy"]
},
"version": {
"type": "string",
"description": "Policy version (SemVer)",
"pattern": "^\\d+\\.\\d+\\.\\d+$",
"examples": ["1.0.0", "2.1.0"]
},
"description": {
"type": "string",
"description": "Human-readable policy description"
},
"tenantScope": {
"type": "string",
"description": "Tenant ID this policy applies to, or '*' for all tenants",
"default": "*"
},
"predicateTypes": {
"type": "array",
"description": "Allowed attestation predicate types",
"items": {
"type": "string"
},
"minItems": 1,
"examples": [
["stella.ops/sbom@v1", "stella.ops/vex@v1"]
]
},
"signerRequirements": {
"$ref": "#/$defs/SignerRequirements"
},
"validityWindow": {
"$ref": "#/$defs/ValidityWindow"
},
"metadata": {
"type": "object",
"description": "Free-form metadata",
"additionalProperties": true
}
},
"$defs": {
"SignerRequirements": {
"type": "object",
"description": "Requirements for attestation signers",
"properties": {
"minimumSignatures": {
"type": "integer",
"minimum": 1,
"default": 1,
"description": "Minimum number of valid signatures required"
},
"trustedKeyFingerprints": {
"type": "array",
"items": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"description": "List of trusted signer key fingerprints (SHA-256)"
},
"trustedIssuers": {
"type": "array",
"items": {
"type": "string",
"format": "uri"
},
"description": "List of trusted issuer identities (OIDC issuers)"
},
"requireRekor": {
"type": "boolean",
"default": false,
"description": "Require Sigstore Rekor transparency log entry"
},
"algorithms": {
"type": "array",
"items": {
"type": "string",
"enum": ["ES256", "ES384", "ES512", "RS256", "RS384", "RS512", "EdDSA"]
},
"description": "Allowed signing algorithms",
"default": ["ES256", "RS256", "EdDSA"]
}
}
},
"ValidityWindow": {
"type": "object",
"description": "Time-based validity constraints",
"properties": {
"notBefore": {
"type": "string",
"format": "date-time",
"description": "Policy not valid before this time (ISO-8601)"
},
"notAfter": {
"type": "string",
"format": "date-time",
"description": "Policy not valid after this time (ISO-8601)"
},
"maxAttestationAge": {
"type": "integer",
"minimum": 0,
"description": "Maximum age of attestation in seconds (0 = no limit)"
}
}
}
},
"examples": [
{
"policyId": "default-verification-policy",
"version": "1.0.0",
"description": "Default verification policy for StellaOps attestations",
"tenantScope": "*",
"predicateTypes": [
"stella.ops/sbom@v1",
"stella.ops/vex@v1",
"stella.ops/vexDecision@v1",
"stella.ops/policy@v1",
"stella.ops/promotion@v1",
"stella.ops/evidence@v1",
"stella.ops/graph@v1",
"stella.ops/replay@v1",
"https://slsa.dev/provenance/v1",
"https://cyclonedx.org/bom",
"https://spdx.dev/Document",
"https://openvex.dev/ns"
],
"signerRequirements": {
"minimumSignatures": 1,
"trustedKeyFingerprints": [
"sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2"
],
"requireRekor": false,
"algorithms": ["ES256", "RS256", "EdDSA"]
},
"validityWindow": {
"maxAttestationAge": 86400
}
}
]
}
{ "$schema": "https://json-schema.org/draft/2020-12/schema", "$id": "https://stellaops.io/schemas/verification-policy.v1.json", "title": "VerificationPolicy", "description": "Attestation verification policy configuration for StellaOps", "type": "object", "required": ["policyId", "version", "predicateTypes", "signerRequirements"], "properties": { "policyId": { "type": "string", "description": "Unique policy identifier", "pattern": "^[a-z0-9-]+$", "examples": ["default-verification-policy", "strict-slsa-policy"] }, "version": { "type": "string", "description": "Policy version (SemVer)", "pattern": "^\\d+\\.\\d+\\.\\d+$", "examples": ["1.0.0", "2.1.0"] }, "description": { "type": "string", "description": "Human-readable policy description" }, "tenantScope": { "type": "string", "description": "Tenant ID this policy applies to, or '*' for all tenants", "default": "*" }, "predicateTypes": { "type": "array", "description": "Allowed attestation predicate types", "items": { "type": "string" }, "minItems": 1, "examples": [ ["stella.ops/sbom@v1", "stella.ops/vex@v1"] ] }, "signerRequirements": { "$ref": "#/$defs/SignerRequirements" }, "validityWindow": { "$ref": "#/$defs/ValidityWindow" }, "metadata": { "type": "object", "description": "Free-form metadata", "additionalProperties": true } }, "$defs": { "SignerRequirements": { "type": "object", "description": "Requirements for attestation signers", "properties": { "minimumSignatures": { "type": "integer", "minimum": 1, "default": 1, "description": "Minimum number of valid signatures required" }, "trustedKeyFingerprints": { "type": "array", "items": { "type": "string", "pattern": "^sha256:[a-f0-9]{64}$" }, "description": "List of trusted signer key fingerprints (SHA-256)" }, "trustedIssuers": { "type": "array", "items": { "type": "string", "format": "uri" }, "description": "List of trusted issuer identities (OIDC issuers)" }, "requireRekor": { "type": "boolean", "default": false, "description": "Require Sigstore Rekor 
transparency log entry" }, "algorithms": { "type": "array", "items": { "type": "string", "enum": ["ES256", "ES384", "ES512", "RS256", "RS384", "RS512", "EdDSA"] }, "description": "Allowed signing algorithms", "default": ["ES256", "RS256", "EdDSA"] } } }, "ValidityWindow": { "type": "object", "description": "Time-based validity constraints", "properties": { "notBefore": { "type": "string", "format": "date-time", "description": "Policy not valid before this time (ISO-8601)" }, "notAfter": { "type": "string", "format": "date-time", "description": "Policy not valid after this time (ISO-8601)" }, "maxAttestationAge": { "type": "integer", "minimum": 0, "description": "Maximum age of attestation in seconds (0 = no limit)" } } } }, "examples": [ { "policyId": "default-verification-policy", "version": "1.0.0", "description": "Default verification policy for StellaOps attestations", "tenantScope": "*", "predicateTypes": [ "stella.ops/sbom@v1", "stella.ops/vex@v1", "stella.ops/vexDecision@v1", "stella.ops/policy@v1", "stella.ops/promotion@v1", "stella.ops/evidence@v1", "stella.ops/graph@v1", "stella.ops/replay@v1", "https://slsa.dev/provenance/v1", "https://cyclonedx.org/bom", "https://spdx.dev/Document", "https://openvex.dev/ns" ], "signerRequirements": { "minimumSignatures": 1, "trustedKeyFingerprints": [ "sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2" ], "requireRekor": false, "algorithms": ["ES256", "RS256", "EdDSA"] }, "validityWindow": { "maxAttestationAge": 86400 } } ] }

View File

@@ -1,7 +1,8 @@
using System.Buffers.Binary;
using System.Collections.Immutable;
using System.Formats.Asn1;
using System.IO;
using System.Linq;
using System.Net;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
@@ -234,8 +235,7 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
signatures.Add(signatureBytes);
}
var verified = 0;
var expectedSignatures = new List<byte[]>();
foreach (var secret in _options.Security.SignerIdentity.KmsKeys)
{
if (!TryDecodeSecret(secret, out var secretBytes))
@@ -244,14 +244,15 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
}
using var hmac = new HMACSHA256(secretBytes);
var computed = hmac.ComputeHash(preAuthEncoding);
expectedSignatures.Add(hmac.ComputeHash(preAuthEncoding));
}
foreach (var candidate in signatures)
var verified = 0;
foreach (var candidate in signatures)
{
if (expectedSignatures.Any(expected => CryptographicOperations.FixedTimeEquals(expected, candidate)))
{
if (CryptographicOperations.FixedTimeEquals(computed, candidate))
{
verified++;
}
verified++;
}
}
@@ -294,11 +295,11 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
var leafCertificate = certificates[0];
var subjectAltName = GetSubjectAlternativeNames(leafCertificate).FirstOrDefault();
if (_options.Security.SignerIdentity.FulcioRoots.Count > 0)
{
using var chain = new X509Chain
if (_options.Security.SignerIdentity.FulcioRoots.Count > 0)
{
ChainPolicy =
using var chain = new X509Chain
{
ChainPolicy =
{
RevocationMode = X509RevocationMode.NoCheck,
VerificationFlags = X509VerificationFlags.NoFlag,
@@ -306,29 +307,34 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
}
};
foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots)
{
try
foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots)
{
if (File.Exists(rootPath))
try
{
var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath);
chain.ChainPolicy.CustomTrustStore.Add(rootCertificate);
if (File.Exists(rootPath))
{
var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath);
chain.ChainPolicy.CustomTrustStore.Add(rootCertificate);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath);
}
}
catch (Exception ex)
for (var i = 1; i < certificates.Count; i++)
{
_logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath);
chain.ChainPolicy.ExtraStore.Add(certificates[i]);
}
if (!chain.Build(leafCertificate))
{
var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim())).Trim(';');
issuerIssues.Add(string.IsNullOrEmpty(status) ? "certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}");
}
}
if (!chain.Build(leafCertificate))
{
var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim())).Trim(';');
issuerIssues.Add(string.IsNullOrEmpty(status) ? "certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}");
}
}
if (_options.Security.SignerIdentity.AllowedSans.Count > 0)
{
var sans = GetSubjectAlternativeNames(leafCertificate);
@@ -775,14 +781,44 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
{
if (string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal))
{
var formatted = extension.Format(true);
var lines = formatted.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
foreach (var line in lines)
AsnReader reader;
try
{
var parts = line.Split('=');
if (parts.Length == 2)
reader = new AsnReader(extension.RawData, AsnEncodingRules.DER);
}
catch (AsnContentException)
{
yield break;
}
var sequence = reader.ReadSequence();
while (sequence.HasData)
{
var tag = sequence.PeekTag();
if (tag.TagClass != TagClass.ContextSpecific)
{
yield return parts[1].Trim();
sequence.ReadEncodedValue();
continue;
}
switch (tag.TagValue)
{
case 1:
yield return sequence.ReadCharacterString(UniversalTagNumber.IA5String, new Asn1Tag(TagClass.ContextSpecific, 1));
break;
case 2:
yield return sequence.ReadCharacterString(UniversalTagNumber.IA5String, new Asn1Tag(TagClass.ContextSpecific, 2));
break;
case 6:
yield return sequence.ReadCharacterString(UniversalTagNumber.IA5String, new Asn1Tag(TagClass.ContextSpecific, 6));
break;
case 7:
var ipBytes = sequence.ReadOctetString(new Asn1Tag(TagClass.ContextSpecific, 7));
yield return new IPAddress(ipBytes).ToString();
break;
default:
sequence.ReadEncodedValue();
break;
}
}
}
@@ -791,21 +827,32 @@ public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
private static byte[] ComputePreAuthEncoding(string payloadType, byte[] payload)
{
var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length];
var payloadTypeValue = payloadType ?? string.Empty;
var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadTypeValue);
var payloadTypeLength = Encoding.ASCII.GetBytes(payloadTypeBytes.Length.ToString(System.Globalization.CultureInfo.InvariantCulture));
var payloadLength = Encoding.ASCII.GetBytes(payload.Length.ToString(System.Globalization.CultureInfo.InvariantCulture));
var space = new byte[] { (byte)' ' };
var totalLength = 6 + space.Length + payloadTypeLength.Length + space.Length + payloadTypeBytes.Length
+ space.Length + payloadLength.Length + space.Length + payload.Length;
var buffer = new byte[totalLength];
var offset = 0;
Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset);
offset += 6;
// Appends 'source' to 'destination' at the current cursor position and
// advances 'index' by the number of bytes copied (append-with-cursor helper
// used to assemble the DSSE pre-authentication encoding buffer).
static void CopyBytes(byte[] source, byte[] destination, ref int index)
{
Buffer.BlockCopy(source, 0, destination, index, source.Length);
index += source.Length;
}
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length);
offset += 8;
Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length);
offset += headerBytes.Length;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length);
offset += 8;
Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length);
CopyBytes(Encoding.ASCII.GetBytes("DSSEv1"), buffer, ref offset);
CopyBytes(space, buffer, ref offset);
CopyBytes(payloadTypeLength, buffer, ref offset);
CopyBytes(space, buffer, ref offset);
CopyBytes(payloadTypeBytes, buffer, ref offset);
CopyBytes(space, buffer, ref offset);
CopyBytes(payloadLength, buffer, ref offset);
CopyBytes(space, buffer, ref offset);
payload.CopyTo(buffer.AsSpan(offset));
return buffer;
}

View File

@@ -1,21 +1,17 @@
// ───────────────────────────────────────────────────────────────────────────
// StellaOps Attestor Distributed Verification Provider (Resilient, Multi-Node)
// -----------------------------------------------------------------------------
// StellaOps Attestor - Distributed Verification Provider (Resilient, Multi-Node)
// SPDX-License-Identifier: AGPL-3.0-or-later
// ───────────────────────────────────────────────────────────────────────────
// -----------------------------------------------------------------------------
#if STELLAOPS_EXPERIMENTAL_DISTRIBUTED_VERIFY
using System.Buffers.Binary;
using System.Collections.Concurrent;
using System.Net.Http.Json;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Polly;
using Polly.CircuitBreaker;
using Polly.Retry;
using Polly.Timeout;
using StellaOps.Attestor.Verify.Configuration;
using StellaOps.Attestor.Verify.Models;
@@ -32,7 +28,6 @@ public class DistributedVerificationProvider : IVerificationProvider
private readonly HttpClient _httpClient;
private readonly ConcurrentDictionary<string, CircuitBreakerState> _circuitStates = new();
private readonly ConsistentHashRing _hashRing;
private readonly ResiliencePipeline<VerificationResult> _resiliencePipeline;
public DistributedVerificationProvider(
ILogger<DistributedVerificationProvider> logger,
@@ -49,7 +44,6 @@ public class DistributedVerificationProvider : IVerificationProvider
}
_hashRing = new ConsistentHashRing(_options.Nodes, _options.VirtualNodeMultiplier);
_resiliencePipeline = BuildResiliencePipeline();
_logger.LogInformation("Initialized distributed verification provider with {NodeCount} nodes", _options.Nodes.Count);
}
@@ -83,9 +77,7 @@ public class DistributedVerificationProvider : IVerificationProvider
try
{
var result = await _resiliencePipeline.ExecuteAsync(
async ct => await ExecuteVerificationAsync(node, request, ct),
cancellationToken);
var result = await ExecuteWithRetriesAsync(node, request, cancellationToken);
_logger.LogInformation(
"Verification request {RequestId} completed on node {NodeId} with result {Status}",
@@ -196,37 +188,36 @@ public class DistributedVerificationProvider : IVerificationProvider
return result ?? throw new InvalidOperationException("Received null response from verification node");
}
private ResiliencePipeline<VerificationResult> BuildResiliencePipeline()
private async Task<VerificationResult> ExecuteWithRetriesAsync(
VerificationNode node,
VerificationRequest request,
CancellationToken cancellationToken)
{
return new ResiliencePipelineBuilder<VerificationResult>()
.AddTimeout(new TimeoutStrategyOptions
Exception? lastError = null;
for (var attempt = 0; attempt <= _options.MaxRetries; attempt++)
{
using var attemptCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
attemptCts.CancelAfter(_options.RequestTimeout);
try
{
Timeout = _options.RequestTimeout,
OnTimeout = args =>
{
_logger.LogWarning("Request timed out after {Timeout}", args.Timeout);
return default;
},
})
.AddRetry(new RetryStrategyOptions<VerificationResult>
return await ExecuteVerificationAsync(node, request, attemptCts.Token);
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
MaxRetryAttempts = _options.MaxRetries,
Delay = _options.RetryDelay,
BackoffType = DelayBackoffType.Exponential,
ShouldHandle = new PredicateBuilder<VerificationResult>()
.Handle<HttpRequestException>()
.Handle<TaskCanceledException>(),
OnRetry = args =>
lastError = ex;
if (attempt >= _options.MaxRetries)
{
_logger.LogWarning(
args.Outcome.Exception,
"Retry attempt {AttemptNumber} after delay {Delay}",
args.AttemptNumber,
args.RetryDelay);
return default;
},
})
.Build();
break;
}
_logger.LogWarning(ex, "Retry attempt {AttemptNumber} after delay {Delay}", attempt + 1, _options.RetryDelay);
await Task.Delay(_options.RetryDelay, cancellationToken);
}
}
throw lastError ?? new InvalidOperationException("Verification retry failed.");
}
private static string ComputeRoutingKey(VerificationRequest request)
@@ -342,7 +333,7 @@ internal sealed class ConsistentHashRing
private static int ComputeHash(string key)
{
var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(key));
return BitConverter.ToInt32(hashBytes, 0);
return BinaryPrimitives.ReadInt32BigEndian(hashBytes);
}
}

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Attestor\\StellaOps.Attestor.Core\\StellaOps.Attestor.Core.csproj" />

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0071-M | DONE | Maintainability audit for StellaOps.Attestor.Verify. |
| AUDIT-0071-T | DONE | Test coverage audit for StellaOps.Attestor.Verify. |
| AUDIT-0071-A | TODO | Pending approval for changes. |
| AUDIT-0071-A | DONE | Applied DSSE PAE spec, SAN parsing, keyless chain store fix, KMS count fix, distributed provider cleanup, and tests. |

View File

@@ -1,5 +1,6 @@
using System;
using System.Buffers.Binary;
using System.Buffers;
using System.Globalization;
using System.Text;
namespace StellaOps.Attestor.Core.Signing;
@@ -10,27 +11,33 @@ namespace StellaOps.Attestor.Core.Signing;
public static class DssePreAuthenticationEncoding
{
private static readonly byte[] Prefix = Encoding.ASCII.GetBytes("DSSEv1");
private static readonly byte[] Space = new byte[] { (byte)' ' };
public static byte[] Compute(string payloadType, ReadOnlySpan<byte> payload)
{
var header = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
var buffer = new byte[Prefix.Length + sizeof(long) + header.Length + sizeof(long) + payload.Length];
var offset = 0;
var payloadTypeValue = payloadType ?? string.Empty;
var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadTypeValue);
var payloadTypeLength = Encoding.ASCII.GetBytes(payloadTypeBytes.Length.ToString(CultureInfo.InvariantCulture));
var payloadLength = Encoding.ASCII.GetBytes(payload.Length.ToString(CultureInfo.InvariantCulture));
Prefix.CopyTo(buffer, offset);
offset += Prefix.Length;
var buffer = new ArrayBufferWriter<byte>();
Write(buffer, Prefix);
Write(buffer, Space);
Write(buffer, payloadTypeLength);
Write(buffer, Space);
Write(buffer, payloadTypeBytes);
Write(buffer, Space);
Write(buffer, payloadLength);
Write(buffer, Space);
Write(buffer, payload);
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, sizeof(long)), (ulong)header.Length);
offset += sizeof(long);
return buffer.WrittenSpan.ToArray();
}
header.CopyTo(buffer, offset);
offset += header.Length;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, sizeof(long)), (ulong)payload.Length);
offset += sizeof(long);
payload.CopyTo(buffer.AsSpan(offset));
return buffer;
/// <summary>Appends <paramref name="bytes"/> to <paramref name="writer"/>.</summary>
private static void Write(ArrayBufferWriter<byte> writer, ReadOnlySpan<byte> bytes)
{
    // BuffersExtensions.Write performs the same GetSpan/CopyTo/Advance sequence.
    writer.Write(bytes);
}
}

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0049-M | DONE | Maintainability audit for StellaOps.Attestor.Core. |
| AUDIT-0049-T | DONE | Test coverage audit for StellaOps.Attestor.Core. |
| AUDIT-0049-A | TODO | Pending approval for changes. |
| AUDIT-0049-A | DOING | Pending approval for changes. |

View File

@@ -11,6 +11,17 @@ internal sealed class InMemoryBulkVerificationJobStore : IBulkVerificationJobSto
{
private readonly ConcurrentQueue<BulkVerificationJob> _queue = new();
private readonly ConcurrentDictionary<string, BulkVerificationJob> _jobs = new(StringComparer.OrdinalIgnoreCase);
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates a store that reads the clock from <see cref="TimeProvider.System"/>.
/// </summary>
public InMemoryBulkVerificationJobStore()
: this(TimeProvider.System)
{
}
/// <summary>
/// Creates a store with an injectable clock (used to stamp job start times;
/// lets tests supply a fake <see cref="TimeProvider"/> for determinism).
/// </summary>
/// <param name="timeProvider">Clock source; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="timeProvider"/> is null.</exception>
public InMemoryBulkVerificationJobStore(TimeProvider timeProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
public Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
@@ -36,7 +47,7 @@ internal sealed class InMemoryBulkVerificationJobStore : IBulkVerificationJobSto
}
job.Status = BulkVerificationJobStatus.Running;
job.StartedAt ??= DateTimeOffset.UtcNow;
job.StartedAt ??= _timeProvider.GetUtcNow();
return Task.FromResult<BulkVerificationJob?>(job);
}

View File

@@ -1,3 +1,4 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Attestor.Tests")]
[assembly: InternalsVisibleTo("StellaOps.Attestor.Infrastructure.Tests")]

View File

@@ -499,6 +499,10 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
private static RekorQueueItem ReadQueueItem(NpgsqlDataReader reader)
{
var nextRetryAtOrdinal = reader.GetOrdinal("next_retry_at");
var createdAtOrdinal = reader.GetOrdinal("created_at");
var updatedAtOrdinal = reader.GetOrdinal("updated_at");
return new RekorQueueItem
{
Id = reader.GetGuid(reader.GetOrdinal("id")),
@@ -509,9 +513,11 @@ public sealed class PostgresRekorSubmissionQueue : IRekorSubmissionQueue
Status = Enum.Parse<RekorSubmissionStatus>(reader.GetString(reader.GetOrdinal("status")), ignoreCase: true),
AttemptCount = reader.GetInt32(reader.GetOrdinal("attempt_count")),
MaxAttempts = reader.GetInt32(reader.GetOrdinal("max_attempts")),
NextRetryAt = reader.GetDateTime(reader.GetOrdinal("next_retry_at")),
CreatedAt = reader.GetDateTime(reader.GetOrdinal("created_at")),
UpdatedAt = reader.GetDateTime(reader.GetOrdinal("updated_at")),
NextRetryAt = reader.IsDBNull(nextRetryAtOrdinal)
? null
: reader.GetFieldValue<DateTimeOffset>(nextRetryAtOrdinal),
CreatedAt = reader.GetFieldValue<DateTimeOffset>(createdAtOrdinal),
UpdatedAt = reader.GetFieldValue<DateTimeOffset>(updatedAtOrdinal),
LastError = reader.IsDBNull(reader.GetOrdinal("last_error"))
? null
: reader.GetString(reader.GetOrdinal("last_error")),

View File

@@ -205,6 +205,13 @@ internal sealed class HttpRekorClient : IRekorClient
try
{
var logIndex = await GetLogIndexAsync(rekorUuid, backend, cancellationToken).ConfigureAwait(false);
if (!logIndex.HasValue)
{
return RekorInclusionVerificationResult.Failure(
"Failed to resolve Rekor log index for inclusion proof");
}
// Compute expected leaf hash from payload
var expectedLeafHash = MerkleProofVerifier.HashLeaf(payloadDigest);
var actualLeafHash = MerkleProofVerifier.HexToBytes(proof.Inclusion.LeafHash);
@@ -225,13 +232,10 @@ internal sealed class HttpRekorClient : IRekorClient
var expectedRootHash = MerkleProofVerifier.HexToBytes(proof.Checkpoint.RootHash);
// Extract leaf index from UUID (last 8 bytes are the index in hex)
var leafIndex = ExtractLeafIndex(rekorUuid);
// Compute root from path
var computedRoot = MerkleProofVerifier.ComputeRootFromPath(
actualLeafHash,
leafIndex,
logIndex.Value,
proof.Checkpoint.Size,
proofPath);
@@ -248,7 +252,7 @@ internal sealed class HttpRekorClient : IRekorClient
// Verify root hash matches checkpoint
var verified = MerkleProofVerifier.VerifyInclusion(
actualLeafHash,
leafIndex,
logIndex.Value,
proof.Checkpoint.Size,
proofPath,
expectedRootHash);
@@ -263,13 +267,13 @@ internal sealed class HttpRekorClient : IRekorClient
_logger.LogInformation(
"Successfully verified Rekor inclusion for UUID {Uuid} at index {Index}",
rekorUuid, leafIndex);
rekorUuid, logIndex);
return RekorInclusionVerificationResult.Success(
leafIndex,
logIndex.Value,
computedRootHex,
proof.Checkpoint.RootHash,
checkpointSignatureValid: true); // TODO: Implement checkpoint signature verification
checkpointSignatureValid: false);
}
catch (Exception ex) when (ex is FormatException or ArgumentException)
{
@@ -279,36 +283,47 @@ internal sealed class HttpRekorClient : IRekorClient
}
}
/// <summary>
/// Extracts the leaf index from a Rekor UUID.
/// Rekor UUIDs are formatted as: &lt;entry-hash&gt;-&lt;tree-id&gt;-&lt;log-index-hex&gt;
/// </summary>
private static long ExtractLeafIndex(string rekorUuid)
private async Task<long?> GetLogIndexAsync(string rekorUuid, RekorBackend backend, CancellationToken cancellationToken)
{
// Try to parse as hex number from the end of the UUID
// Rekor v1 format: 64 hex chars for entry hash + log index suffix
if (rekorUuid.Length >= 16)
var entryUri = BuildUri(backend.Url, $"api/v2/log/entries/{rekorUuid}");
using var request = new HttpRequestMessage(HttpMethod.Get, entryUri);
using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
// Take last 16 chars as potential hex index
var indexPart = rekorUuid[^16..];
if (long.TryParse(indexPart, System.Globalization.NumberStyles.HexNumber, null, out var index))
_logger.LogDebug("Rekor entry {Uuid} not found when resolving log index", rekorUuid);
return null;
}
response.EnsureSuccessStatusCode();
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false);
return TryGetLogIndex(document.RootElement, out var logIndex) ? logIndex : null;
}
private static bool TryGetLogIndex(JsonElement element, out long logIndex)
{
if (element.ValueKind == JsonValueKind.Object)
{
if (element.TryGetProperty("logIndex", out var logIndexElement)
&& logIndexElement.TryGetInt64(out logIndex))
{
return index;
return true;
}
foreach (var property in element.EnumerateObject())
{
if (TryGetLogIndex(property.Value, out logIndex))
{
return true;
}
}
}
// Fallback: try parsing UUID parts separated by dashes
var parts = rekorUuid.Split('-');
if (parts.Length >= 1)
{
var lastPart = parts[^1];
if (long.TryParse(lastPart, System.Globalization.NumberStyles.HexNumber, null, out var index))
{
return index;
}
}
// Default to 0 if we can't parse
return 0;
logIndex = 0;
return false;
}
}

View File

@@ -1,4 +1,7 @@
using System;
using System.Buffers.Binary;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -10,15 +13,18 @@ namespace StellaOps.Attestor.Infrastructure.Rekor;
internal sealed class StubRekorClient : IRekorClient
{
private readonly ILogger<StubRekorClient> _logger;
private readonly TimeProvider _timeProvider;
public StubRekorClient(ILogger<StubRekorClient> logger)
public StubRekorClient(ILogger<StubRekorClient> logger, TimeProvider timeProvider)
{
_logger = logger;
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
public Task<RekorSubmissionResponse> SubmitAsync(AttestorSubmissionRequest request, RekorBackend backend, CancellationToken cancellationToken = default)
{
var uuid = Guid.NewGuid().ToString();
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(request.Meta.BundleSha256 ?? string.Empty));
var uuid = new Guid(hash.AsSpan(0, 16)).ToString();
_logger.LogInformation("Stub Rekor submission for bundle {BundleSha} -> {Uuid}", request.Meta.BundleSha256, uuid);
var proof = new RekorProofResponse
@@ -28,7 +34,7 @@ internal sealed class StubRekorClient : IRekorClient
Origin = backend.Url.Host,
Size = 1,
RootHash = request.Meta.BundleSha256,
Timestamp = DateTimeOffset.UtcNow
Timestamp = _timeProvider.GetUtcNow()
},
Inclusion = new RekorProofResponse.RekorInclusionProof
{
@@ -40,7 +46,7 @@ internal sealed class StubRekorClient : IRekorClient
var response = new RekorSubmissionResponse
{
Uuid = uuid,
Index = Random.Shared.NextInt64(1, long.MaxValue),
Index = ComputeDeterministicIndex(hash),
LogUrl = new Uri(backend.Url, $"/api/v2/log/entries/{uuid}").ToString(),
Status = "included",
Proof = proof
@@ -59,7 +65,7 @@ internal sealed class StubRekorClient : IRekorClient
Origin = backend.Url.Host,
Size = 1,
RootHash = string.Empty,
Timestamp = DateTimeOffset.UtcNow
Timestamp = _timeProvider.GetUtcNow()
},
Inclusion = new RekorProofResponse.RekorInclusionProof
{
@@ -85,4 +91,20 @@ internal sealed class StubRekorClient : IRekorClient
expectedRootHash: "stub-root-hash",
checkpointSignatureValid: true));
}
/// <summary>
/// Derives a stable, positive Rekor log index from the bundle hash so the stub
/// returns the same index for the same submission (replaces the previous
/// Random.Shared.NextInt64(1, long.MaxValue) call).
/// </summary>
/// <param name="hash">Digest bytes; the first 8 bytes are consumed when available.</param>
/// <returns>A deterministic index in the range [1, long.MaxValue].</returns>
private static long ComputeDeterministicIndex(byte[] hash)
{
    if (hash.Length < sizeof(long))
    {
        // Degenerate input: fall back to the smallest valid index.
        return 1;
    }
    var value = BinaryPrimitives.ReadInt64BigEndian(hash.AsSpan(0, sizeof(long)));
    if (value == long.MinValue)
    {
        // Math.Abs(long.MinValue) overflows; clamp to the maximum index instead.
        return long.MaxValue;
    }
    // Math.Abs(0) would yield 0, but the contract this method replaced produced
    // indices starting at 1 — clamp to keep the index strictly positive.
    return Math.Max(1, Math.Abs(value));
}
}

View File

@@ -35,6 +35,7 @@ public static class ServiceCollectionExtensions
public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
{
services.AddMemoryCache();
services.AddSingleton(TimeProvider.System);
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddSingleton(sp =>
@@ -66,9 +67,21 @@ public static class ServiceCollectionExtensions
services.AddSingleton<IAttestorBundleService, AttestorBundleService>();
services.AddSingleton<AttestorSigningKeyRegistry>();
services.AddSingleton<IAttestationSigningService, AttestorSigningService>();
services.AddHttpClient<HttpRekorClient>(client =>
services.AddHttpClient<HttpRekorClient>((sp, client) =>
{
client.Timeout = TimeSpan.FromSeconds(30);
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
var timeoutMs = options.Rekor.Primary.ProofTimeoutMs;
if (options.Rekor.Mirror.Enabled)
{
timeoutMs = Math.Max(timeoutMs, options.Rekor.Mirror.ProofTimeoutMs);
}
if (timeoutMs <= 0)
{
timeoutMs = 15_000;
}
client.Timeout = TimeSpan.FromMilliseconds(timeoutMs);
});
services.AddSingleton<IRekorClient>(sp => sp.GetRequiredService<HttpRekorClient>());
@@ -104,7 +117,7 @@ public static class ServiceCollectionExtensions
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (string.IsNullOrWhiteSpace(options.Redis.Url))
{
return new InMemoryAttestorDedupeStore();
return ActivatorUtilities.CreateInstance<InMemoryAttestorDedupeStore>(sp);
}
var multiplexer = sp.GetRequiredService<IConnectionMultiplexer>();

View File

@@ -185,27 +185,22 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
throw new InvalidOperationException($"Signing key '{key.KeyId}' must specify kmsVersionId when using mode 'kms'.");
}
var material = kmsClient.ExportAsync(providerKeyId, versionId, default).GetAwaiter().GetResult();
var parameters = new ECParameters
{
Curve = ECCurve.NamedCurves.nistP256,
D = material.D,
Q = new ECPoint
{
X = material.Qx,
Y = material.Qy
}
};
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["kms.version"] = material.VersionId
["kms.version"] = versionId
};
var privateHandle = System.Text.Encoding.UTF8.GetBytes(
string.IsNullOrWhiteSpace(versionId) ? providerKeyId : versionId);
if (privateHandle.Length == 0)
{
throw new InvalidOperationException($"Signing key '{key.KeyId}' must supply a non-empty KMS reference.");
}
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
in parameters,
privateHandle,
now,
expiresAt: null,
metadata: metadata);

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />

View File

@@ -1,29 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.SmSoft\StellaOps.Cryptography.Plugin.SmSoft.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
<PackageReference Include="AWSSDK.S3" Version="4.0.2" />
</ItemGroup>
</Project>

View File

@@ -8,11 +8,15 @@ namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class InMemoryAttestorAuditSink : IAttestorAuditSink
{
private readonly object _sync = new();
public List<AttestorAuditRecord> Records { get; } = new();
public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default)
{
Records.Add(record);
lock (_sync)
{
Records.Add(record);
}
return Task.CompletedTask;
}
}

View File

@@ -9,12 +9,23 @@ namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class InMemoryAttestorDedupeStore : IAttestorDedupeStore
{
private readonly ConcurrentDictionary<string, (string Uuid, DateTimeOffset ExpiresAt)> _store = new();
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates a dedupe store that reads the clock from <see cref="TimeProvider.System"/>.
/// </summary>
public InMemoryAttestorDedupeStore()
: this(TimeProvider.System)
{
}
/// <summary>
/// Creates a dedupe store with an injectable clock (used for entry TTL/expiry
/// checks; lets tests supply a fake <see cref="TimeProvider"/>).
/// </summary>
/// <param name="timeProvider">Clock source; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="timeProvider"/> is null.</exception>
public InMemoryAttestorDedupeStore(TimeProvider timeProvider)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
public Task<string?> TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
if (_store.TryGetValue(bundleSha256, out var entry))
{
if (entry.ExpiresAt > DateTimeOffset.UtcNow)
if (entry.ExpiresAt > _timeProvider.GetUtcNow())
{
return Task.FromResult<string?>(entry.Uuid);
}
@@ -27,7 +38,7 @@ internal sealed class InMemoryAttestorDedupeStore : IAttestorDedupeStore
public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default)
{
_store[bundleSha256] = (rekorUuid, DateTimeOffset.UtcNow.Add(ttl));
_store[bundleSha256] = (rekorUuid, _timeProvider.GetUtcNow().Add(ttl));
return Task.CompletedTask;
}
}

View File

@@ -141,7 +141,7 @@ internal sealed class InMemoryAttestorEntryRepository : IAttestorEntryRepository
return false;
}
return string.CompareOrdinal(e.RekorUuid, continuation.RekorUuid) >= 0;
return string.CompareOrdinal(e.RekorUuid, continuation.RekorUuid) > 0;
});
}
@@ -150,19 +150,19 @@ internal sealed class InMemoryAttestorEntryRepository : IAttestorEntryRepository
.ThenBy(e => e.RekorUuid, StringComparer.Ordinal);
var page = ordered.Take(pageSize + 1).ToList();
AttestorEntry? next = null;
AttestorEntry? continuationSource = null;
if (page.Count > pageSize)
{
next = page[^1];
page.RemoveAt(page.Count - 1);
continuationSource = page[^1];
}
var result = new AttestorEntryQueryResult
{
Items = page,
ContinuationToken = next is null
ContinuationToken = continuationSource is null
? null
: AttestorEntryContinuationToken.Encode(next.CreatedAt, next.RekorUuid)
: AttestorEntryContinuationToken.Encode(continuationSource.CreatedAt, continuationSource.RekorUuid)
};
return Task.FromResult(result);

View File

@@ -54,7 +54,8 @@ internal sealed class S3AttestorArchiveStore : IAttestorArchiveStore, IDisposabl
metadata["bundle.sha256"] = bundle.BundleSha256;
metadata["rekor.uuid"] = bundle.RekorUuid;
var metadataObject = JsonSerializer.SerializeToUtf8Bytes(metadata);
var orderedMetadata = new SortedDictionary<string, string>(metadata, StringComparer.Ordinal);
var metadataObject = JsonSerializer.SerializeToUtf8Bytes(orderedMetadata);
await PutObjectAsync(prefix + "meta/" + bundle.RekorUuid + ".json", metadataObject, cancellationToken).ConfigureAwait(false);
await PutObjectAsync(prefix + "meta/" + bundle.BundleSha256 + ".json", metadataObject, cancellationToken).ConfigureAwait(false);
}

View File

@@ -1,3 +1,4 @@
using System;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Threading;
@@ -16,6 +17,8 @@ public sealed class DefaultDsseCanonicalizer : IDsseCanonicalizer
public Task<byte[]> CanonicalizeAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default)
{
cancellationToken.ThrowIfCancellationRequested();
var node = new JsonObject
{
["payloadType"] = request.Bundle.Dsse.PayloadType,
@@ -23,14 +26,16 @@ public sealed class DefaultDsseCanonicalizer : IDsseCanonicalizer
["signatures"] = CreateSignaturesArray(request)
};
var json = node.ToJsonString(SerializerOptions);
return Task.FromResult(JsonSerializer.SerializeToUtf8Bytes(JsonNode.Parse(json)!, SerializerOptions));
var bytes = JsonSerializer.SerializeToUtf8Bytes(node, SerializerOptions);
return Task.FromResult(bytes);
}
private static JsonArray CreateSignaturesArray(AttestorSubmissionRequest request)
{
var array = new JsonArray();
foreach (var signature in request.Bundle.Dsse.Signatures)
foreach (var signature in request.Bundle.Dsse.Signatures
.OrderBy(s => s.KeyId ?? string.Empty, StringComparer.Ordinal)
.ThenBy(s => s.Signature, StringComparer.Ordinal))
{
var obj = new JsonObject
{

View File

@@ -7,4 +7,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0055-M | DONE | Maintainability audit for StellaOps.Attestor.Infrastructure. |
| AUDIT-0055-T | DONE | Test coverage audit for StellaOps.Attestor.Infrastructure. |
| AUDIT-0055-A | TODO | Pending approval for changes. |
| AUDIT-0055-A | DONE | Applied audit remediation and added infrastructure tests. |
| VAL-SMOKE-001 | DONE | Fixed continuation token behavior; unit tests pass. |

View File

@@ -214,7 +214,10 @@ internal sealed class AttestorVerificationService : IAttestorVerificationService
private async Task<AttestorEntry?> ResolveEntryByArtifactAsync(string artifactSha256, bool refreshProof, CancellationToken cancellationToken)
{
var entries = await _repository.GetByArtifactShaAsync(artifactSha256, cancellationToken).ConfigureAwait(false);
var entry = entries.OrderByDescending(e => e.CreatedAt).FirstOrDefault();
var entry = entries
.OrderByDescending(e => e.CreatedAt)
.ThenBy(e => e.RekorUuid, StringComparer.Ordinal)
.FirstOrDefault();
if (entry is null)
{
return null;

View File

@@ -7,6 +7,7 @@
#if STELLAOPS_EXPERIMENTAL_REKOR_QUEUE
using System;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
@@ -15,6 +16,7 @@ using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Queue;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Submission;
using System.Text.Json;
namespace StellaOps.Attestor.Infrastructure.Workers;
@@ -190,41 +192,90 @@ public sealed class RekorRetryWorker : BackgroundService
{
return backend.ToLowerInvariant() switch
{
"primary" => new RekorBackend(
_attestorOptions.Rekor.Primary.Url ?? throw new InvalidOperationException("Primary Rekor URL not configured"),
"primary"),
"mirror" => new RekorBackend(
_attestorOptions.Rekor.Mirror.Url ?? throw new InvalidOperationException("Mirror Rekor URL not configured"),
"mirror"),
"primary" => BuildBackend("primary", _attestorOptions.Rekor.Primary),
"mirror" => BuildBackend("mirror", _attestorOptions.Rekor.Mirror),
_ => throw new InvalidOperationException($"Unknown Rekor backend: {backend}")
};
}
private static AttestorSubmissionRequest BuildSubmissionRequest(RekorQueueItem item)
{
// Reconstruct the submission request from the stored payload
var dsseEnvelope = ParseDsseEnvelope(item.DssePayload);
return new AttestorSubmissionRequest
{
TenantId = item.TenantId,
BundleSha256 = item.BundleSha256,
DssePayload = item.DssePayload
Bundle = new AttestorSubmissionRequest.SubmissionBundle
{
Dsse = dsseEnvelope
},
Meta = new AttestorSubmissionRequest.SubmissionMeta
{
BundleSha256 = item.BundleSha256,
Artifact = new AttestorSubmissionRequest.ArtifactInfo()
}
};
}
private static AttestorSubmissionRequest.DsseEnvelope ParseDsseEnvelope(byte[] payload)
{
if (payload.Length == 0)
{
throw new InvalidOperationException("Queue item DSSE payload is empty.");
}
using var document = JsonDocument.Parse(payload);
var root = document.RootElement;
var payloadType = root.GetProperty("payloadType").GetString()
?? throw new InvalidOperationException("Queue item DSSE payload missing payloadType.");
var payloadBase64 = root.GetProperty("payload").GetString()
?? throw new InvalidOperationException("Queue item DSSE payload missing payload.");
var signatures = new List<AttestorSubmissionRequest.DsseSignature>();
if (root.TryGetProperty("signatures", out var signaturesElement) && signaturesElement.ValueKind == JsonValueKind.Array)
{
foreach (var signatureElement in signaturesElement.EnumerateArray())
{
var signatureValue = signatureElement.GetProperty("sig").GetString()
?? throw new InvalidOperationException("Queue item DSSE signature missing sig.");
signatureElement.TryGetProperty("keyid", out var keyIdElement);
signatures.Add(new AttestorSubmissionRequest.DsseSignature
{
Signature = signatureValue,
KeyId = keyIdElement.ValueKind == JsonValueKind.String ? keyIdElement.GetString() : null
});
}
}
if (signatures.Count == 0)
{
throw new InvalidOperationException("Queue item DSSE payload missing signatures.");
}
return new AttestorSubmissionRequest.DsseEnvelope
{
PayloadType = payloadType,
PayloadBase64 = payloadBase64,
Signatures = signatures
};
}
private static RekorBackend BuildBackend(string name, AttestorOptions.RekorBackendOptions options)
{
if (string.IsNullOrWhiteSpace(options.Url))
{
throw new InvalidOperationException($"Rekor backend '{name}' is not configured.");
}
return new RekorBackend
{
Name = name,
Url = new Uri(options.Url, UriKind.Absolute),
ProofTimeout = TimeSpan.FromMilliseconds(options.ProofTimeoutMs),
PollInterval = TimeSpan.FromMilliseconds(options.PollIntervalMs),
MaxAttempts = options.MaxAttempts
};
}
}
/// <summary>
/// Simple Rekor backend configuration.
/// </summary>
public sealed record RekorBackend(string Url, string Name);
/// <summary>
/// Submission request for the retry worker.
/// </summary>
public sealed class AttestorSubmissionRequest
{
public string TenantId { get; init; } = string.Empty;
public string BundleSha256 { get; init; } = string.Empty;
public byte[] DssePayload { get; init; } = Array.Empty<byte>();
}
#endif

View File

@@ -194,7 +194,8 @@ internal sealed class AttestorWebApplicationFactory : WebApplicationFactory<Prog
["attestor:s3:useTls"] = "false",
["attestor:redis:url"] = string.Empty,
["attestor:postgres:connectionString"] = "Host=localhost;Port=5432;Database=attestor-tests",
["attestor:postgres:database"] = "attestor-tests"
["attestor:postgres:database"] = "attestor-tests",
["EvidenceLocker:BaseUrl"] = "http://localhost"
};
configuration.AddInMemoryCollection(settings!);

View File

@@ -49,7 +49,7 @@ public sealed class AttestorSubmissionServiceTests
var validator = new AttestorSubmissionValidator(canonicalizer);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var witnessClient = new TestTransparencyWitnessClient();
@@ -131,7 +131,7 @@ public sealed class AttestorSubmissionServiceTests
var validator = new AttestorSubmissionValidator(canonicalizer);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var witnessClient = new TestTransparencyWitnessClient();
@@ -199,7 +199,7 @@ public sealed class AttestorSubmissionServiceTests
var validator = new AttestorSubmissionValidator(canonicalizer);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var witnessClient = new TestTransparencyWitnessClient();
@@ -270,7 +270,7 @@ public sealed class AttestorSubmissionServiceTests
var validator = new AttestorSubmissionValidator(canonicalizer);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var witnessClient = new TestTransparencyWitnessClient();

View File

@@ -1,4 +1,3 @@
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
@@ -67,7 +66,7 @@ public sealed class AttestorVerificationServiceTests
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var submissionService = new AttestorSubmissionService(
@@ -163,7 +162,7 @@ public sealed class AttestorVerificationServiceTests
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var submissionService = new AttestorSubmissionService(
@@ -250,7 +249,7 @@ public sealed class AttestorVerificationServiceTests
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var submissionService = new AttestorSubmissionService(
@@ -416,19 +415,7 @@ public sealed class AttestorVerificationServiceTests
private static byte[] ComputePreAuthEncodingForTests(string payloadType, byte[] payload)
{
var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length];
var offset = 0;
Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset);
offset += 6;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length);
offset += 8;
Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length);
offset += headerBytes.Length;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length);
offset += 8;
Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length);
return buffer;
return StellaOps.Attestor.Core.Signing.DssePreAuthenticationEncoding.Compute(payloadType, payload);
}
[Trait("Category", TestCategories.Unit)]
@@ -629,7 +616,7 @@ public sealed class AttestorVerificationServiceTests
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>(), TimeProvider.System);
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var witnessClient = new TestTransparencyWitnessClient

View File

@@ -0,0 +1,454 @@
using System.Security.Claims;
using System.Security.Cryptography.X509Certificates;
using Microsoft.AspNetCore.Http;
using StellaOps.Attestor.Core.Bulk;
using StellaOps.Attestor.Core.Offline;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Signing;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.WebService.Contracts;
namespace StellaOps.Attestor.WebService;
/// <summary>
/// Minimal-API route registrations for the Attestor web service: listing, bundle
/// export/import, DSSE signing, Rekor submission, entry retrieval, and (bulk)
/// verification. Authorization policies ("attestor:read"/"attestor:write"/
/// "attestor:verify") and rate-limiter partitions are referenced by name here and
/// presumably registered by the host during startup — TODO confirm.
/// </summary>
internal static class AttestorWebServiceEndpoints
{
    /// <summary>
    /// Maps every attestor HTTP endpoint onto <paramref name="app"/>.
    /// </summary>
    /// <param name="app">The application to register routes on.</param>
    /// <param name="attestorOptions">Options consulted for bulk-verification queue quotas.</param>
    public static void MapAttestorEndpoints(this WebApplication app, AttestorOptions attestorOptions)
    {
        // GET /api/v1/attestations — paged listing; query parameters are parsed by
        // AttestationListContracts, which returns a ready-made error result on failure.
        app.MapGet("/api/v1/attestations", async (HttpRequest request, IAttestorEntryRepository repository, CancellationToken cancellationToken) =>
        {
            if (!AttestationListContracts.TryBuildQuery(request, out var query, out var error))
            {
                return error!;
            }
            var result = await repository.QueryAsync(query, cancellationToken).ConfigureAwait(false);
            var response = new AttestationListResponseDto
            {
                Items = result.Items.Select(MapToListItem).ToList(),
                ContinuationToken = result.ContinuationToken
            };
            return Results.Ok(response);
        })
        .RequireAuthorization("attestor:read")
        .RequireRateLimiting("attestor-reads");
        // POST /api/v1/attestations:export — produce an offline bundle package.
        // An empty body is allowed and treated as a default export request; a
        // non-empty body must be JSON. NOTE(review): ContentLength is null for
        // chunked bodies, which bypasses the content-type check — confirm intended.
        app.MapPost("/api/v1/attestations:export", async (HttpContext httpContext, AttestationExportRequestDto? requestDto, IAttestorBundleService bundleService, CancellationToken cancellationToken) =>
        {
            if (httpContext.Request.ContentLength > 0 && !IsJsonContentType(httpContext.Request.ContentType))
            {
                return UnsupportedMediaTypeResult();
            }
            AttestorBundleExportRequest request;
            if (requestDto is null)
            {
                request = new AttestorBundleExportRequest();
            }
            else if (!requestDto.TryToDomain(out request, out var error))
            {
                return error!;
            }
            var package = await bundleService.ExportAsync(request, cancellationToken).ConfigureAwait(false);
            return Results.Ok(package);
        })
        .RequireAuthorization("attestor:read")
        .RequireRateLimiting("attestor-reads")
        .Produces<AttestorBundlePackage>(StatusCodes.Status200OK);
        // POST /api/v1/attestations:import — ingest a previously exported bundle.
        app.MapPost("/api/v1/attestations:import", async (HttpContext httpContext, AttestorBundlePackage package, IAttestorBundleService bundleService, CancellationToken cancellationToken) =>
        {
            if (!IsJsonContentType(httpContext.Request.ContentType))
            {
                return UnsupportedMediaTypeResult();
            }
            var result = await bundleService.ImportAsync(package, cancellationToken).ConfigureAwait(false);
            return Results.Ok(result);
        })
        .RequireAuthorization("attestor:write")
        .RequireRateLimiting("attestor-submissions")
        .Produces<AttestorBundleImportResult>(StatusCodes.Status200OK);
        // POST /api/v1/attestations:sign — sign a payload via the configured signing
        // service. Requires both a client certificate (mTLS) and an authenticated
        // principal; signing failures surface as 400 problem-details with a "code".
        app.MapPost("/api/v1/attestations:sign", async (AttestationSignRequestDto? requestDto, HttpContext httpContext, IAttestationSigningService signingService, CancellationToken cancellationToken) =>
        {
            if (requestDto is null)
            {
                return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Request body is required.");
            }
            if (!IsJsonContentType(httpContext.Request.ContentType))
            {
                return UnsupportedMediaTypeResult();
            }
            var certificate = httpContext.Connection.ClientCertificate;
            if (certificate is null)
            {
                return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required");
            }
            var user = httpContext.User;
            if (user?.Identity is not { IsAuthenticated: true })
            {
                return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required");
            }
            // Map the DTO onto the domain signing request; missing optional fields
            // fall back to empty strings / defaults ("primary" log, archive on).
            var signingRequest = new AttestationSignRequest
            {
                KeyId = requestDto.KeyId ?? string.Empty,
                PayloadType = requestDto.PayloadType ?? string.Empty,
                PayloadBase64 = requestDto.Payload ?? string.Empty,
                Mode = requestDto.Mode,
                CertificateChain = requestDto.CertificateChain ?? new List<string>(),
                Artifact = new AttestorSubmissionRequest.ArtifactInfo
                {
                    Sha256 = requestDto.Artifact?.Sha256 ?? string.Empty,
                    Kind = requestDto.Artifact?.Kind ?? string.Empty,
                    ImageDigest = requestDto.Artifact?.ImageDigest,
                    SubjectUri = requestDto.Artifact?.SubjectUri
                },
                LogPreference = requestDto.LogPreference ?? "primary",
                Archive = requestDto.Archive ?? true
            };
            try
            {
                var submissionContext = BuildSubmissionContext(user, certificate);
                var result = await signingService.SignAsync(signingRequest, submissionContext, cancellationToken).ConfigureAwait(false);
                var response = new AttestationSignResponseDto
                {
                    Bundle = result.Bundle,
                    Meta = result.Meta,
                    Key = new AttestationSignKeyDto
                    {
                        KeyId = result.KeyId,
                        Algorithm = result.Algorithm,
                        Mode = result.Mode,
                        Provider = result.Provider,
                        // Round-trip ISO-8601 timestamp.
                        SignedAt = result.SignedAt.ToString("O")
                    }
                };
                return Results.Ok(response);
            }
            catch (AttestorSigningException signingEx)
            {
                return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: signingEx.Message, extensions: new Dictionary<string, object?>
                {
                    ["code"] = signingEx.Code
                });
            }
        }).RequireAuthorization("attestor:write")
        .RequireRateLimiting("attestor-submissions");
        // POST /api/v1/rekor/entries — submit a DSSE bundle to the Rekor log.
        // Same mTLS + authenticated-principal gate as the sign endpoint.
        app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, HttpContext httpContext, IAttestorSubmissionService submissionService, CancellationToken cancellationToken) =>
        {
            if (!IsJsonContentType(httpContext.Request.ContentType))
            {
                return UnsupportedMediaTypeResult();
            }
            var certificate = httpContext.Connection.ClientCertificate;
            if (certificate is null)
            {
                return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required");
            }
            var user = httpContext.User;
            if (user?.Identity is not { IsAuthenticated: true })
            {
                return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required");
            }
            var submissionContext = BuildSubmissionContext(user, certificate);
            try
            {
                var result = await submissionService.SubmitAsync(request, submissionContext, cancellationToken).ConfigureAwait(false);
                return Results.Ok(result);
            }
            catch (AttestorValidationException validationEx)
            {
                return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: validationEx.Message, extensions: new Dictionary<string, object?>
                {
                    ["code"] = validationEx.Code
                });
            }
        })
        .RequireAuthorization("attestor:write")
        .RequireRateLimiting("attestor-submissions");
        // GET /api/v1/rekor/entries/{uuid} and /api/v1/attestations/{uuid} — two
        // routes for the same detail lookup; ?refresh=true re-fetches proof data.
        app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
            await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken))
        .RequireAuthorization("attestor:read")
        .RequireRateLimiting("attestor-reads");
        app.MapGet("/api/v1/attestations/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
            await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken))
        .RequireAuthorization("attestor:read")
        .RequireRateLimiting("attestor-reads");
        // POST /api/v1/rekor/verify — synchronous single verification.
        app.MapPost("/api/v1/rekor/verify", async (HttpContext httpContext, AttestorVerificationRequest verifyRequest, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
        {
            if (!IsJsonContentType(httpContext.Request.ContentType))
            {
                return UnsupportedMediaTypeResult();
            }
            try
            {
                var result = await verificationService.VerifyAsync(verifyRequest, cancellationToken).ConfigureAwait(false);
                return Results.Ok(result);
            }
            catch (AttestorVerificationException ex)
            {
                return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: ex.Message, extensions: new Dictionary<string, object?>
                {
                    ["code"] = ex.Code
                });
            }
        })
        .RequireAuthorization("attestor:verify")
        .RequireRateLimiting("attestor-verifications");
        // POST /api/v1/rekor/verify:bulk — enqueue an asynchronous bulk verification
        // job; rejects with 429 once the queued-job quota is reached.
        app.MapPost("/api/v1/rekor/verify:bulk", async (
            BulkVerificationRequestDto? requestDto,
            HttpContext httpContext,
            IBulkVerificationJobStore jobStore,
            CancellationToken cancellationToken) =>
        {
            var context = BuildBulkJobContext(httpContext.User);
            if (!BulkVerificationContracts.TryBuildJob(requestDto, attestorOptions, context, out var job, out var error))
            {
                return error!;
            }
            var queued = await jobStore.CountQueuedAsync(cancellationToken).ConfigureAwait(false);
            // Quota floor of 1 guards against a zero/negative configuration value.
            if (queued >= Math.Max(1, attestorOptions.Quotas.Bulk.MaxQueuedJobs))
            {
                return Results.Problem(statusCode: StatusCodes.Status429TooManyRequests, title: "Too many bulk verification jobs queued. Try again later.");
            }
            job = await jobStore.CreateAsync(job!, cancellationToken).ConfigureAwait(false);
            var response = BulkVerificationContracts.MapJob(job);
            // 202 Accepted with a Location pointing at the job-status endpoint below.
            return Results.Accepted($"/api/v1/rekor/verify:bulk/{job.Id}", response);
        }).RequireAuthorization("attestor:write")
        .RequireRateLimiting("attestor-bulk");
        // GET /api/v1/rekor/verify:bulk/{jobId} — poll a bulk job. Unknown jobs and
        // jobs owned by a different tenant/subject both return 404 (no information leak).
        app.MapGet("/api/v1/rekor/verify:bulk/{jobId}", async (
            string jobId,
            HttpContext httpContext,
            IBulkVerificationJobStore jobStore,
            CancellationToken cancellationToken) =>
        {
            if (string.IsNullOrWhiteSpace(jobId))
            {
                return Results.NotFound();
            }
            var job = await jobStore.GetAsync(jobId, cancellationToken).ConfigureAwait(false);
            if (job is null || !IsAuthorizedForJob(job, httpContext.User))
            {
                return Results.NotFound();
            }
            return Results.Ok(BulkVerificationContracts.MapJob(job));
        }).RequireAuthorization("attestor:write");
    }
    /// <summary>
    /// Resolves an attestation entry by Rekor UUID and maps it to the detail DTO;
    /// returns 404 when no entry exists.
    /// </summary>
    private static async Task<IResult> GetAttestationDetailResultAsync(
        string uuid,
        bool refresh,
        IAttestorVerificationService verificationService,
        CancellationToken cancellationToken)
    {
        var entry = await verificationService.GetEntryAsync(uuid, refresh, cancellationToken).ConfigureAwait(false);
        if (entry is null)
        {
            return Results.NotFound();
        }
        return Results.Ok(MapAttestationDetail(entry));
    }
    /// <summary>
    /// Maps a stored <see cref="AttestorEntry"/> (log placement, proof, optional
    /// mirror, artifact) onto the public detail response DTO. Timestamps are
    /// serialized with the round-trip "O" format.
    /// </summary>
    private static AttestationDetailResponseDto MapAttestationDetail(AttestorEntry entry)
    {
        return new AttestationDetailResponseDto
        {
            Uuid = entry.RekorUuid,
            Index = entry.Index,
            Backend = entry.Log.Backend,
            Proof = entry.Proof is null ? null : new AttestationProofDto
            {
                Checkpoint = entry.Proof.Checkpoint is null ? null : new AttestationCheckpointDto
                {
                    Origin = entry.Proof.Checkpoint.Origin,
                    Size = entry.Proof.Checkpoint.Size,
                    RootHash = entry.Proof.Checkpoint.RootHash,
                    Timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O")
                },
                Inclusion = entry.Proof.Inclusion is null ? null : new AttestationInclusionDto
                {
                    LeafHash = entry.Proof.Inclusion.LeafHash,
                    Path = entry.Proof.Inclusion.Path
                }
            },
            LogUrl = entry.Log.Url,
            Status = entry.Status,
            Mirror = entry.Mirror is null ? null : new AttestationMirrorDto
            {
                Backend = entry.Mirror.Backend,
                Uuid = entry.Mirror.Uuid,
                Index = entry.Mirror.Index,
                LogUrl = entry.Mirror.Url,
                Status = entry.Mirror.Status,
                Proof = entry.Mirror.Proof is null ? null : new AttestationProofDto
                {
                    Checkpoint = entry.Mirror.Proof.Checkpoint is null ? null : new AttestationCheckpointDto
                    {
                        Origin = entry.Mirror.Proof.Checkpoint.Origin,
                        Size = entry.Mirror.Proof.Checkpoint.Size,
                        RootHash = entry.Mirror.Proof.Checkpoint.RootHash,
                        Timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O")
                    },
                    Inclusion = entry.Mirror.Proof.Inclusion is null ? null : new AttestationInclusionDto
                    {
                        LeafHash = entry.Mirror.Proof.Inclusion.LeafHash,
                        Path = entry.Mirror.Proof.Inclusion.Path
                    }
                },
                Error = entry.Mirror.Error
            },
            Artifact = new AttestationArtifactDto
            {
                Sha256 = entry.Artifact.Sha256,
                Kind = entry.Artifact.Kind,
                ImageDigest = entry.Artifact.ImageDigest,
                SubjectUri = entry.Artifact.SubjectUri
            }
        };
    }
    /// <summary>
    /// Maps a stored entry onto the compact list-item DTO used by the listing endpoint.
    /// </summary>
    private static AttestationListItemDto MapToListItem(AttestorEntry entry)
    {
        return new AttestationListItemDto
        {
            Uuid = entry.RekorUuid,
            Status = entry.Status,
            CreatedAt = entry.CreatedAt.ToString("O"),
            Artifact = new AttestationArtifactDto
            {
                Sha256 = entry.Artifact.Sha256,
                Kind = entry.Artifact.Kind,
                ImageDigest = entry.Artifact.ImageDigest,
                SubjectUri = entry.Artifact.SubjectUri
            },
            Signer = new AttestationSignerDto
            {
                Mode = entry.SignerIdentity.Mode,
                Issuer = entry.SignerIdentity.Issuer,
                Subject = entry.SignerIdentity.SubjectAlternativeName,
                KeyId = entry.SignerIdentity.KeyId
            },
            Log = new AttestationLogDto
            {
                Backend = entry.Log.Backend,
                Url = entry.Log.Url,
                Index = entry.Index,
                Status = entry.Status
            },
            Mirror = entry.Mirror is null ? null : new AttestationLogDto
            {
                Backend = entry.Mirror.Backend,
                Url = entry.Mirror.Url,
                Index = entry.Mirror.Index,
                Status = entry.Mirror.Status
            }
        };
    }
    /// <summary>
    /// Builds the submission context from the authenticated principal and the mTLS
    /// client certificate; the certificate subject is the fallback caller identity
    /// when no "sub" claim is present.
    /// </summary>
    private static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certificate2 certificate)
    {
        var subject = user.FindFirst("sub")?.Value ?? certificate.Subject;
        var audience = user.FindFirst("aud")?.Value ?? string.Empty;
        var clientId = user.FindFirst("client_id")?.Value;
        var tenant = user.FindFirst("tenant")?.Value;
        return new SubmissionContext
        {
            CallerSubject = subject,
            CallerAudience = audience,
            CallerClientId = clientId,
            CallerTenant = tenant,
            ClientCertificate = certificate,
            MtlsThumbprint = certificate.Thumbprint
        };
    }
    /// <summary>
    /// Captures tenant, subject, client id, and scopes from the principal's claims
    /// for attribution on a bulk verification job.
    /// </summary>
    private static BulkVerificationJobContext BuildBulkJobContext(ClaimsPrincipal user)
    {
        var scopes = user.FindAll("scope")
            .Select(claim => claim.Value)
            .Where(value => !string.IsNullOrWhiteSpace(value))
            .ToList();
        return new BulkVerificationJobContext
        {
            Tenant = user.FindFirst("tenant")?.Value,
            RequestedBy = user.FindFirst("sub")?.Value,
            ClientId = user.FindFirst("client_id")?.Value,
            Scopes = scopes
        };
    }
    /// <summary>
    /// A caller may read a job only when its recorded tenant and requesting subject
    /// (when set on the job) match the caller's claims exactly (ordinal comparison).
    /// Jobs created without tenant/subject are readable by any authorized caller.
    /// </summary>
    private static bool IsAuthorizedForJob(BulkVerificationJob job, ClaimsPrincipal user)
    {
        var tenant = user.FindFirst("tenant")?.Value;
        if (!string.IsNullOrEmpty(job.Context.Tenant) &&
            !string.Equals(job.Context.Tenant, tenant, StringComparison.Ordinal))
        {
            return false;
        }
        var subject = user.FindFirst("sub")?.Value;
        if (!string.IsNullOrEmpty(job.Context.RequestedBy) &&
            !string.Equals(job.Context.RequestedBy, subject, StringComparison.Ordinal))
        {
            return false;
        }
        return true;
    }
    /// <summary>
    /// True for JSON media types: accepts "application/json", any "*/json", and
    /// structured-syntax suffixes such as "application/vnd.foo+json"; charset and
    /// other parameters after ';' are ignored.
    /// </summary>
    private static bool IsJsonContentType(string? contentType)
    {
        if (string.IsNullOrWhiteSpace(contentType))
        {
            return false;
        }
        var mediaType = contentType.Split(';', 2)[0].Trim();
        if (mediaType.Length == 0)
        {
            return false;
        }
        return mediaType.EndsWith("/json", StringComparison.OrdinalIgnoreCase)
            || mediaType.Contains("+json", StringComparison.OrdinalIgnoreCase);
    }
    /// <summary>
    /// Standard 415 problem-details response with a machine-readable "code" extension.
    /// </summary>
    private static IResult UnsupportedMediaTypeResult()
    {
        return Results.Problem(
            statusCode: StatusCodes.Status415UnsupportedMediaType,
            title: "Unsupported content type. Submit application/json payloads.",
            extensions: new Dictionary<string, object?>
            {
                ["code"] = "unsupported_media_type"
            });
    }
}

View File

@@ -0,0 +1,87 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.WebService.Contracts;
/// <summary>
/// Detail response for a single attestation entry: Rekor log placement, optional
/// inclusion proof, optional mirror-log state, and the subject artifact.
/// </summary>
public sealed class AttestationDetailResponseDto
{
    /// <summary>Rekor UUID of the entry.</summary>
    [JsonPropertyName("uuid")]
    public required string Uuid { get; init; }
    /// <summary>Log index within the backend, when known.</summary>
    [JsonPropertyName("index")]
    public long? Index { get; init; }
    /// <summary>Name of the Rekor backend holding the entry.</summary>
    [JsonPropertyName("backend")]
    public required string Backend { get; init; }
    /// <summary>Inclusion proof for the entry, when available.</summary>
    [JsonPropertyName("proof")]
    public AttestationProofDto? Proof { get; init; }
    /// <summary>URL of the transparency log.</summary>
    [JsonPropertyName("logURL")]
    public required string LogUrl { get; init; }
    /// <summary>Current status of the entry.</summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }
    /// <summary>Mirror-log replication state, when a mirror is configured.</summary>
    [JsonPropertyName("mirror")]
    public AttestationMirrorDto? Mirror { get; init; }
    /// <summary>Artifact the attestation refers to.</summary>
    [JsonPropertyName("artifact")]
    public required AttestationArtifactDto Artifact { get; init; }
}
/// <summary>
/// Transparency-log proof: a signed checkpoint plus the Merkle inclusion path.
/// </summary>
public sealed class AttestationProofDto
{
    /// <summary>Signed tree-head checkpoint, when available.</summary>
    [JsonPropertyName("checkpoint")]
    public AttestationCheckpointDto? Checkpoint { get; init; }
    /// <summary>Merkle inclusion proof, when available.</summary>
    [JsonPropertyName("inclusion")]
    public AttestationInclusionDto? Inclusion { get; init; }
}
/// <summary>
/// Transparency-log checkpoint (signed tree head) snapshot.
/// </summary>
public sealed class AttestationCheckpointDto
{
    /// <summary>Checkpoint origin identifier.</summary>
    [JsonPropertyName("origin")]
    public string? Origin { get; init; }
    /// <summary>Tree size at the checkpoint.</summary>
    [JsonPropertyName("size")]
    public long Size { get; init; }
    /// <summary>Root hash of the tree at the checkpoint.</summary>
    [JsonPropertyName("rootHash")]
    public string? RootHash { get; init; }
    /// <summary>Checkpoint timestamp; serialized upstream with the round-trip "O" format.</summary>
    [JsonPropertyName("timestamp")]
    public string? Timestamp { get; init; }
}
/// <summary>
/// Merkle inclusion proof: the leaf hash plus the audit path to the root.
/// </summary>
public sealed class AttestationInclusionDto
{
    /// <summary>Hash of the entry's leaf node.</summary>
    [JsonPropertyName("leafHash")]
    public string? LeafHash { get; init; }
    /// <summary>Sibling hashes from the leaf up to the root; empty when absent.</summary>
    [JsonPropertyName("path")]
    public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
/// <summary>
/// Replication state of an attestation entry on a mirror transparency log.
/// </summary>
public sealed class AttestationMirrorDto
{
    /// <summary>Name of the mirror backend.</summary>
    [JsonPropertyName("backend")]
    public required string Backend { get; init; }
    /// <summary>Entry UUID on the mirror, when replicated.</summary>
    [JsonPropertyName("uuid")]
    public string? Uuid { get; init; }
    /// <summary>Log index on the mirror, when known.</summary>
    [JsonPropertyName("index")]
    public long? Index { get; init; }
    /// <summary>URL of the mirror log.</summary>
    [JsonPropertyName("logURL")]
    public required string LogUrl { get; init; }
    /// <summary>Replication status on the mirror.</summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }
    /// <summary>Inclusion proof on the mirror, when available.</summary>
    [JsonPropertyName("proof")]
    public AttestationProofDto? Proof { get; init; }
    /// <summary>Last replication error, when replication failed.</summary>
    [JsonPropertyName("error")]
    public string? Error { get; init; }
}

View File

@@ -1,4 +1,6 @@
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
using StellaOps.Attestor.WebService.Contracts.Anchors;
namespace StellaOps.Attestor.WebService.Controllers;
@@ -25,14 +27,13 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>List of trust anchors.</returns>
[HttpGet]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(typeof(TrustAnchorDto[]), StatusCodes.Status200OK)]
public async Task<ActionResult<TrustAnchorDto[]>> GetAnchorsAsync(CancellationToken ct = default)
{
_logger.LogInformation("Getting all trust anchors");
// TODO: Implement using IProofChainRepository.GetActiveTrustAnchorsAsync
return Ok(Array.Empty<TrustAnchorDto>());
return NotImplementedResult();
}
/// <summary>
@@ -42,6 +43,8 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The trust anchor.</returns>
[HttpGet("{anchorId}")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
@@ -49,26 +52,8 @@ public class AnchorsController : ControllerBase
[FromRoute] string anchorId,
CancellationToken ct = default)
{
if (!Guid.TryParse(anchorId, out var parsedAnchorId))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid anchor ID",
Detail = "Anchor ID must be a valid GUID.",
Status = StatusCodes.Status400BadRequest
});
}
_logger.LogInformation("Getting trust anchor {AnchorId}", parsedAnchorId);
// TODO: Implement using IProofChainRepository.GetTrustAnchorAsync
return NotFound(new ProblemDetails
{
Title = "Trust Anchor Not Found",
Detail = $"No trust anchor found with ID {parsedAnchorId}",
Status = StatusCodes.Status404NotFound
});
_logger.LogInformation("Getting trust anchor {AnchorId}", anchorId);
return NotImplementedResult();
}
/// <summary>
@@ -78,6 +63,8 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The created trust anchor.</returns>
[HttpPost]
[Authorize("attestor:write")]
[EnableRateLimiting("attestor-submissions")]
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status201Created)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
[ProducesResponseType(StatusCodes.Status409Conflict)]
@@ -86,26 +73,7 @@ public class AnchorsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Creating trust anchor for pattern {Pattern}", request.PurlPattern);
// TODO: Implement using IProofChainRepository.SaveTrustAnchorAsync
// 1. Check for existing anchor with same pattern
// 2. Create new anchor entity
// 3. Save to repository
// 4. Log audit entry
var anchor = new TrustAnchorDto
{
AnchorId = Guid.NewGuid(),
PurlPattern = request.PurlPattern,
AllowedKeyIds = request.AllowedKeyIds,
AllowedPredicateTypes = request.AllowedPredicateTypes,
PolicyRef = request.PolicyRef,
PolicyVersion = request.PolicyVersion,
CreatedAt = DateTimeOffset.UtcNow,
UpdatedAt = DateTimeOffset.UtcNow
};
return CreatedAtAction(nameof(GetAnchorAsync), new { anchorId = anchor.AnchorId }, anchor);
return NotImplementedResult();
}
/// <summary>
@@ -116,6 +84,8 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The updated trust anchor.</returns>
[HttpPatch("{anchorId:guid}")]
[Authorize("attestor:write")]
[EnableRateLimiting("attestor-submissions")]
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult<TrustAnchorDto>> UpdateAnchorAsync(
@@ -124,19 +94,7 @@ public class AnchorsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Updating trust anchor {AnchorId}", anchorId);
// TODO: Implement using IProofChainRepository
// 1. Get existing anchor
// 2. Apply updates
// 3. Save to repository
// 4. Log audit entry
return NotFound(new ProblemDetails
{
Title = "Trust Anchor Not Found",
Detail = $"No trust anchor found with ID {anchorId}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
/// <summary>
@@ -147,6 +105,8 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>No content on success.</returns>
[HttpPost("{anchorId:guid}/revoke-key")]
[Authorize("attestor:write")]
[EnableRateLimiting("attestor-submissions")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
@@ -156,20 +116,7 @@ public class AnchorsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Revoking key {KeyId} in anchor {AnchorId}", request.KeyId, anchorId);
// TODO: Implement using IProofChainRepository.RevokeKeyAsync
// 1. Get existing anchor
// 2. Add key to revoked_keys
// 3. Remove from allowed_keyids
// 4. Save to repository
// 5. Log audit entry
return NotFound(new ProblemDetails
{
Title = "Trust Anchor Not Found",
Detail = $"No trust anchor found with ID {anchorId}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
/// <summary>
@@ -179,6 +126,8 @@ public class AnchorsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>No content on success.</returns>
[HttpDelete("{anchorId:guid}")]
[Authorize("attestor:write")]
[EnableRateLimiting("attestor-submissions")]
[ProducesResponseType(StatusCodes.Status204NoContent)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult> DeleteAnchorAsync(
@@ -186,14 +135,19 @@ public class AnchorsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Deactivating trust anchor {AnchorId}", anchorId);
return NotImplementedResult();
}
// TODO: Implement - set is_active = false (soft delete)
return NotFound(new ProblemDetails
private static ObjectResult NotImplementedResult()
{
return new ObjectResult(new ProblemDetails
{
Title = "Trust Anchor Not Found",
Detail = $"No trust anchor found with ID {anchorId}",
Status = StatusCodes.Status404NotFound
});
Title = "Trust anchor management is not implemented.",
Status = StatusCodes.Status501NotImplemented,
Extensions = { ["code"] = "feature_not_implemented" }
})
{
StatusCode = StatusCodes.Status501NotImplemented
};
}
}

View File

@@ -1,6 +1,6 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
using StellaOps.Attestor.WebService.Contracts.Proofs;
namespace StellaOps.Attestor.WebService.Controllers;
@@ -29,6 +29,8 @@ public class ProofsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The created proof bundle ID.</returns>
[HttpPost("{entry}/spine")]
[Authorize("attestor:write")]
[EnableRateLimiting("attestor-submissions")]
[ProducesResponseType(typeof(CreateSpineResponse), StatusCodes.Status201Created)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
@@ -39,49 +41,7 @@ public class ProofsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Creating proof spine for entry {Entry}", entry);
// Validate entry format
if (!IsValidSbomEntryId(entry))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid SBOM Entry ID",
Detail = "Entry ID must be in format sha256:<hex>:pkg:<purl>",
Status = StatusCodes.Status400BadRequest
});
}
// TODO: Implement spine creation using IProofSpineAssembler
// 1. Validate all evidence IDs exist
// 2. Validate reasoning ID exists
// 3. Validate VEX verdict ID exists
// 4. Assemble spine using merkle tree
// 5. Sign and store spine
// 6. Return proof bundle ID
foreach (var evidenceId in request.EvidenceIds)
{
if (!IsValidSha256Id(evidenceId))
{
return UnprocessableEntity(new ProblemDetails
{
Title = "Invalid evidence ID",
Detail = "Evidence IDs must be in format sha256:<64-hex>",
Status = StatusCodes.Status422UnprocessableEntity
});
}
}
var proofBundleId = ComputeProofBundleId(entry, request);
var receiptUrl = $"/proofs/{Uri.EscapeDataString(entry)}/receipt";
var response = new CreateSpineResponse
{
ProofBundleId = proofBundleId,
ReceiptUrl = receiptUrl
};
return Created(receiptUrl, response);
return NotImplementedResult();
}
/// <summary>
@@ -91,6 +51,8 @@ public class ProofsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The verification receipt.</returns>
[HttpGet("{entry}/receipt")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(typeof(VerificationReceiptDto), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult<VerificationReceiptDto>> GetReceiptAsync(
@@ -98,18 +60,7 @@ public class ProofsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Getting receipt for entry {Entry}", entry);
// TODO: Implement receipt retrieval using IReceiptGenerator
// 1. Get spine for entry
// 2. Generate/retrieve verification receipt
// 3. Return receipt
return NotFound(new ProblemDetails
{
Title = "Receipt Not Found",
Detail = $"No verification receipt found for entry {entry}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
/// <summary>
@@ -119,6 +70,8 @@ public class ProofsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The proof spine details.</returns>
[HttpGet("{entry}/spine")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult> GetSpineAsync(
@@ -126,15 +79,7 @@ public class ProofsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Getting spine for entry {Entry}", entry);
// TODO: Implement spine retrieval
return NotFound(new ProblemDetails
{
Title = "Spine Not Found",
Detail = $"No proof spine found for entry {entry}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
/// <summary>
@@ -144,6 +89,8 @@ public class ProofsController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The VEX statement.</returns>
[HttpGet("{entry}/vex")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult> GetVexAsync(
@@ -151,88 +98,19 @@ public class ProofsController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Getting VEX for entry {Entry}", entry);
// TODO: Implement VEX retrieval
return NotFound(new ProblemDetails
{
Title = "VEX Not Found",
Detail = $"No VEX statement found for entry {entry}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
private static bool IsValidSbomEntryId(string entry)
private static ObjectResult NotImplementedResult()
{
// Format: sha256:<64-hex>:pkg:<purl>
if (string.IsNullOrWhiteSpace(entry))
return false;
var parts = entry.Split(':', 4);
if (parts.Length < 4)
return false;
return parts[0] == "sha256"
&& parts[1].Length == 64
&& parts[1].All(c => "0123456789abcdef".Contains(c))
&& parts[2] == "pkg";
}
private static string ComputeProofBundleId(string entry, CreateSpineRequest request)
{
var evidenceIds = request.EvidenceIds
.Select(static value => (value ?? string.Empty).Trim())
.Where(static value => value.Length > 0)
.Distinct(StringComparer.Ordinal)
.OrderBy(static value => value, StringComparer.Ordinal);
var material = string.Join(
"\n",
new[]
{
entry.Trim(),
request.PolicyVersion.Trim(),
request.ReasoningId.Trim(),
request.VexVerdictId.Trim()
}.Concat(evidenceIds));
var digest = SHA256.HashData(Encoding.UTF8.GetBytes(material));
return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
}
private static bool IsValidSha256Id(string value)
{
if (string.IsNullOrWhiteSpace(value))
return new ObjectResult(new ProblemDetails
{
return false;
}
if (!value.StartsWith("sha256:", StringComparison.Ordinal))
Title = "Proof chain endpoints are not implemented.",
Status = StatusCodes.Status501NotImplemented,
Extensions = { ["code"] = "feature_not_implemented" }
})
{
return false;
}
var hex = value.AsSpan()["sha256:".Length..];
if (hex.Length != 64)
{
return false;
}
foreach (var c in hex)
{
if (c is >= '0' and <= '9')
{
continue;
}
if (c is >= 'a' and <= 'f')
{
continue;
}
return false;
}
return true;
StatusCode = StatusCodes.Status501NotImplemented
};
}
}

View File

@@ -1,4 +1,6 @@
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.RateLimiting;
using StellaOps.Attestor.WebService.Contracts.Proofs;
namespace StellaOps.Attestor.WebService.Controllers;
@@ -27,6 +29,8 @@ public class VerifyController : ControllerBase
/// <param name="ct">Cancellation token.</param>
/// <returns>The verification receipt.</returns>
[HttpPost("{proofBundleId}")]
[Authorize("attestor:verify")]
[EnableRateLimiting("attestor-verifications")]
[ProducesResponseType(typeof(VerificationReceiptDto), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status400BadRequest)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
@@ -35,88 +39,13 @@ public class VerifyController : ControllerBase
[FromBody] VerifyProofRequest? request,
CancellationToken ct = default)
{
if (!IsValidSha256Id(proofBundleId))
{
return BadRequest(new ProblemDetails
{
Title = "Invalid proof bundle ID",
Detail = "Proof bundle ID must be in format sha256:<64-hex>",
Status = StatusCodes.Status400BadRequest
});
}
request ??= new VerifyProofRequest
{
ProofBundleId = proofBundleId
};
_logger.LogInformation("Verifying proof bundle {BundleId}", proofBundleId);
// TODO: Implement using IVerificationPipeline per advisory §9.1
// Pipeline steps:
// 1. DSSE signature verification (for each envelope in chain)
// 2. ID recomputation (verify content-addressed IDs match)
// 3. Merkle root verification (recompute ProofBundleID)
// 4. Trust anchor matching (verify signer key is allowed)
// 5. Rekor inclusion proof verification (if enabled)
// 6. Policy version compatibility check
// 7. Key revocation check
var checks = new List<VerificationCheckDto>
{
new()
{
Check = "dsse_signature",
Status = "pass",
KeyId = "example-key-id"
},
new()
{
Check = "id_recomputation",
Status = "pass"
},
new()
{
Check = "merkle_root",
Status = "pass"
},
new()
{
Check = "trust_anchor",
Status = "pass"
}
};
if (request.VerifyRekor)
{
checks.Add(new VerificationCheckDto
{
Check = "rekor_inclusion",
Status = "pass",
LogIndex = 12345678
});
}
var receipt = new VerificationReceiptDto
{
ProofBundleId = proofBundleId,
VerifiedAt = DateTimeOffset.UtcNow,
VerifierVersion = "1.0.0",
AnchorId = request.AnchorId,
Result = "pass",
Checks = checks.ToArray()
};
return Ok(receipt);
return NotImplementedResult();
}
/// <summary>
/// Verify a DSSE envelope signature.
/// </summary>
/// <param name="envelopeHash">The envelope body hash.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Signature verification result.</returns>
[HttpGet("envelope/{envelopeHash}")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult> VerifyEnvelopeAsync(
@@ -124,24 +53,12 @@ public class VerifyController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Verifying envelope {Hash}", envelopeHash);
// TODO: Implement DSSE envelope verification
return NotFound(new ProblemDetails
{
Title = "Envelope Not Found",
Detail = $"No envelope found with hash {envelopeHash}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
/// <summary>
/// Verify Rekor inclusion for an envelope.
/// </summary>
/// <param name="envelopeHash">The envelope body hash.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Rekor verification result.</returns>
[HttpGet("rekor/{envelopeHash}")]
[Authorize("attestor:read")]
[EnableRateLimiting("attestor-reads")]
[ProducesResponseType(StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
public async Task<ActionResult> VerifyRekorAsync(
@@ -149,50 +66,19 @@ public class VerifyController : ControllerBase
CancellationToken ct = default)
{
_logger.LogInformation("Verifying Rekor inclusion for {Hash}", envelopeHash);
// TODO: Implement Rekor inclusion proof verification
return NotFound(new ProblemDetails
{
Title = "Rekor Entry Not Found",
Detail = $"No Rekor entry found for envelope {envelopeHash}",
Status = StatusCodes.Status404NotFound
});
return NotImplementedResult();
}
private static bool IsValidSha256Id(string value)
private static ObjectResult NotImplementedResult()
{
if (string.IsNullOrWhiteSpace(value))
return new ObjectResult(new ProblemDetails
{
return false;
}
if (!value.StartsWith("sha256:", StringComparison.Ordinal))
Title = "Verification endpoints are not implemented.",
Status = StatusCodes.Status501NotImplemented,
Extensions = { ["code"] = "feature_not_implemented" }
})
{
return false;
}
var hex = value.AsSpan()["sha256:".Length..];
if (hex.Length != 64)
{
return false;
}
foreach (var c in hex)
{
if (c is >= '0' and <= '9')
{
continue;
}
if (c is >= 'a' and <= 'f')
{
continue;
}
return false;
}
return true;
StatusCode = StatusCodes.Status501NotImplemented
};
}
}

View File

@@ -24,6 +24,7 @@ using OpenTelemetry.Trace;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.WebService;
using StellaOps.Attestor.WebService.Contracts;
using StellaOps.Attestor.Core.Bulk;
using Microsoft.AspNetCore.Server.Kestrel.Https;
@@ -161,13 +162,16 @@ builder.Services.AddScoped<StellaOps.Attestor.WebService.Services.IPredicateType
builder.Services.AddHttpContextAccessor();
// Configure HttpClient for Evidence Locker integration
var evidenceLockerUrl = builder.Configuration.GetValue<string>("EvidenceLocker:BaseUrl")
?? builder.Configuration.GetValue<string>("EvidenceLockerUrl");
if (string.IsNullOrWhiteSpace(evidenceLockerUrl))
{
throw new InvalidOperationException("EvidenceLocker base URL must be configured (EvidenceLocker:BaseUrl or EvidenceLockerUrl).");
}
builder.Services.AddHttpClient("EvidenceLocker", client =>
{
// TODO: Configure base address from configuration
// For now, use localhost default (will be overridden by actual configuration)
var evidenceLockerUrl = builder.Configuration.GetValue<string>("EvidenceLockerUrl")
?? "http://localhost:9090";
client.BaseAddress = new Uri(evidenceLockerUrl);
client.BaseAddress = new Uri(evidenceLockerUrl, UriKind.Absolute);
client.Timeout = TimeSpan.FromSeconds(30);
});
@@ -374,419 +378,13 @@ app.MapHealthChecks("/health/live");
app.MapControllers();
app.MapGet("/api/v1/attestations", async (HttpRequest request, IAttestorEntryRepository repository, CancellationToken cancellationToken) =>
{
if (!AttestationListContracts.TryBuildQuery(request, out var query, out var error))
{
return error!;
}
var result = await repository.QueryAsync(query, cancellationToken).ConfigureAwait(false);
var response = new AttestationListResponseDto
{
Items = result.Items.Select(MapToListItem).ToList(),
ContinuationToken = result.ContinuationToken
};
return Results.Ok(response);
})
.RequireAuthorization("attestor:read")
.RequireRateLimiting("attestor-reads");
app.MapPost("/api/v1/attestations:export", async (HttpContext httpContext, AttestationExportRequestDto? requestDto, IAttestorBundleService bundleService, CancellationToken cancellationToken) =>
{
if (httpContext.Request.ContentLength > 0 && !IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
AttestorBundleExportRequest request;
if (requestDto is null)
{
request = new AttestorBundleExportRequest();
}
else if (!requestDto.TryToDomain(out request, out var error))
{
return error!;
}
var package = await bundleService.ExportAsync(request, cancellationToken).ConfigureAwait(false);
return Results.Ok(package);
})
.RequireAuthorization("attestor:read")
.RequireRateLimiting("attestor-reads")
.Produces<AttestorBundlePackage>(StatusCodes.Status200OK);
app.MapPost("/api/v1/attestations:import", async (HttpContext httpContext, AttestorBundlePackage package, IAttestorBundleService bundleService, CancellationToken cancellationToken) =>
{
if (!IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
var result = await bundleService.ImportAsync(package, cancellationToken).ConfigureAwait(false);
return Results.Ok(result);
})
.RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-submissions")
.Produces<AttestorBundleImportResult>(StatusCodes.Status200OK);
app.MapPost("/api/v1/attestations:sign", async (AttestationSignRequestDto? requestDto, HttpContext httpContext, IAttestationSigningService signingService, CancellationToken cancellationToken) =>
{
if (requestDto is null)
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: "Request body is required.");
}
if (!IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
var certificate = httpContext.Connection.ClientCertificate;
if (certificate is null)
{
return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required");
}
var user = httpContext.User;
if (user?.Identity is not { IsAuthenticated: true })
{
return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required");
}
var signingRequest = new AttestationSignRequest
{
KeyId = requestDto.KeyId ?? string.Empty,
PayloadType = requestDto.PayloadType ?? string.Empty,
PayloadBase64 = requestDto.Payload ?? string.Empty,
Mode = requestDto.Mode,
CertificateChain = requestDto.CertificateChain ?? new List<string>(),
Artifact = new AttestorSubmissionRequest.ArtifactInfo
{
Sha256 = requestDto.Artifact?.Sha256 ?? string.Empty,
Kind = requestDto.Artifact?.Kind ?? string.Empty,
ImageDigest = requestDto.Artifact?.ImageDigest,
SubjectUri = requestDto.Artifact?.SubjectUri
},
LogPreference = requestDto.LogPreference ?? "primary",
Archive = requestDto.Archive ?? true
};
try
{
var submissionContext = BuildSubmissionContext(user, certificate);
var result = await signingService.SignAsync(signingRequest, submissionContext, cancellationToken).ConfigureAwait(false);
var response = new AttestationSignResponseDto
{
Bundle = result.Bundle,
Meta = result.Meta,
Key = new AttestationSignKeyDto
{
KeyId = result.KeyId,
Algorithm = result.Algorithm,
Mode = result.Mode,
Provider = result.Provider,
SignedAt = result.SignedAt.ToString("O")
}
};
return Results.Ok(response);
}
catch (AttestorSigningException signingEx)
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: signingEx.Message, extensions: new Dictionary<string, object?>
{
["code"] = signingEx.Code
});
}
}).RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-submissions");
app.MapPost("/api/v1/rekor/entries", async (AttestorSubmissionRequest request, HttpContext httpContext, IAttestorSubmissionService submissionService, CancellationToken cancellationToken) =>
{
if (!IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
var certificate = httpContext.Connection.ClientCertificate;
if (certificate is null)
{
return Results.Problem(statusCode: StatusCodes.Status403Forbidden, title: "Client certificate required");
}
var user = httpContext.User;
if (user?.Identity is not { IsAuthenticated: true })
{
return Results.Problem(statusCode: StatusCodes.Status401Unauthorized, title: "Authentication required");
}
var submissionContext = BuildSubmissionContext(user, certificate);
try
{
var result = await submissionService.SubmitAsync(request, submissionContext, cancellationToken).ConfigureAwait(false);
return Results.Ok(result);
}
catch (AttestorValidationException validationEx)
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: validationEx.Message, extensions: new Dictionary<string, object?>
{
["code"] = validationEx.Code
});
}
})
.RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-submissions");
app.MapGet("/api/v1/rekor/entries/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken))
.RequireAuthorization("attestor:read")
.RequireRateLimiting("attestor-reads");
app.MapGet("/api/v1/attestations/{uuid}", async (string uuid, bool? refresh, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
await GetAttestationDetailResultAsync(uuid, refresh is true, verificationService, cancellationToken))
.RequireAuthorization("attestor:read")
.RequireRateLimiting("attestor-reads");
app.MapPost("/api/v1/rekor/verify", async (HttpContext httpContext, AttestorVerificationRequest verifyRequest, IAttestorVerificationService verificationService, CancellationToken cancellationToken) =>
{
if (!IsJsonContentType(httpContext.Request.ContentType))
{
return UnsupportedMediaTypeResult();
}
try
{
var result = await verificationService.VerifyAsync(verifyRequest, cancellationToken).ConfigureAwait(false);
return Results.Ok(result);
}
catch (AttestorVerificationException ex)
{
return Results.Problem(statusCode: StatusCodes.Status400BadRequest, title: ex.Message, extensions: new Dictionary<string, object?>
{
["code"] = ex.Code
});
}
})
.RequireAuthorization("attestor:verify")
.RequireRateLimiting("attestor-verifications");
app.MapPost("/api/v1/rekor/verify:bulk", async (
BulkVerificationRequestDto? requestDto,
HttpContext httpContext,
IBulkVerificationJobStore jobStore,
CancellationToken cancellationToken) =>
{
var context = BuildBulkJobContext(httpContext.User);
if (!BulkVerificationContracts.TryBuildJob(requestDto, attestorOptions, context, out var job, out var error))
{
return error!;
}
var queued = await jobStore.CountQueuedAsync(cancellationToken).ConfigureAwait(false);
if (queued >= Math.Max(1, attestorOptions.Quotas.Bulk.MaxQueuedJobs))
{
return Results.Problem(statusCode: StatusCodes.Status429TooManyRequests, title: "Too many bulk verification jobs queued. Try again later.");
}
job = await jobStore.CreateAsync(job!, cancellationToken).ConfigureAwait(false);
var response = BulkVerificationContracts.MapJob(job);
return Results.Accepted($"/api/v1/rekor/verify:bulk/{job.Id}", response);
}).RequireAuthorization("attestor:write")
.RequireRateLimiting("attestor-bulk");
app.MapGet("/api/v1/rekor/verify:bulk/{jobId}", async (
string jobId,
HttpContext httpContext,
IBulkVerificationJobStore jobStore,
CancellationToken cancellationToken) =>
{
if (string.IsNullOrWhiteSpace(jobId))
{
return Results.NotFound();
}
var job = await jobStore.GetAsync(jobId, cancellationToken).ConfigureAwait(false);
if (job is null || !IsAuthorizedForJob(job, httpContext.User))
{
return Results.NotFound();
}
return Results.Ok(BulkVerificationContracts.MapJob(job));
}).RequireAuthorization("attestor:write");
app.MapAttestorEndpoints(attestorOptions);
// Refresh Router endpoint cache
app.TryRefreshStellaRouterEndpoints(routerOptions);
app.Run();
// Shared lookup used by both /api/v1/rekor/entries/{uuid} and /api/v1/attestations/{uuid}:
// fetches the entry (optionally forcing a refresh from the log backend) and maps it to the
// public detail shape, or returns 404 when the uuid is unknown.
static async Task<IResult> GetAttestationDetailResultAsync(
    string uuid,
    bool refresh,
    IAttestorVerificationService verificationService,
    CancellationToken cancellationToken)
{
    var entry = await verificationService.GetEntryAsync(uuid, refresh, cancellationToken).ConfigureAwait(false);
    if (entry is null)
    {
        return Results.NotFound();
    }
    return Results.Ok(MapAttestationDetail(entry));
}
// Projects an AttestorEntry onto the anonymous response shape of the detail endpoints.
// Property names here ARE the wire contract (note logURL casing); timestamps are emitted
// in round-trip ("O") format. Optional sub-objects (proof, mirror, checkpoints) map to null
// rather than empty objects when absent.
static object MapAttestationDetail(AttestorEntry entry)
{
    return new
    {
        uuid = entry.RekorUuid,
        index = entry.Index,
        backend = entry.Log.Backend,
        proof = entry.Proof is null ? null : new
        {
            checkpoint = entry.Proof.Checkpoint is null ? null : new
            {
                origin = entry.Proof.Checkpoint.Origin,
                size = entry.Proof.Checkpoint.Size,
                rootHash = entry.Proof.Checkpoint.RootHash,
                timestamp = entry.Proof.Checkpoint.Timestamp?.ToString("O")
            },
            inclusion = entry.Proof.Inclusion is null ? null : new
            {
                leafHash = entry.Proof.Inclusion.LeafHash,
                path = entry.Proof.Inclusion.Path
            }
        },
        logURL = entry.Log.Url,
        status = entry.Status,
        // Mirror log details follow the same nested structure as the primary log,
        // plus the last mirror replication error (if any).
        mirror = entry.Mirror is null ? null : new
        {
            backend = entry.Mirror.Backend,
            uuid = entry.Mirror.Uuid,
            index = entry.Mirror.Index,
            logURL = entry.Mirror.Url,
            status = entry.Mirror.Status,
            proof = entry.Mirror.Proof is null ? null : new
            {
                checkpoint = entry.Mirror.Proof.Checkpoint is null ? null : new
                {
                    origin = entry.Mirror.Proof.Checkpoint.Origin,
                    size = entry.Mirror.Proof.Checkpoint.Size,
                    rootHash = entry.Mirror.Proof.Checkpoint.RootHash,
                    timestamp = entry.Mirror.Proof.Checkpoint.Timestamp?.ToString("O")
                },
                inclusion = entry.Mirror.Proof.Inclusion is null ? null : new
                {
                    leafHash = entry.Mirror.Proof.Inclusion.LeafHash,
                    path = entry.Mirror.Proof.Inclusion.Path
                }
            },
            error = entry.Mirror.Error
        },
        artifact = new
        {
            sha256 = entry.Artifact.Sha256,
            kind = entry.Artifact.Kind,
            imageDigest = entry.Artifact.ImageDigest,
            subjectUri = entry.Artifact.SubjectUri
        }
    };
}
// Projects an AttestorEntry onto the list-endpoint DTO. CreatedAt is serialized in
// round-trip ("O") format; the mirror log block is omitted (null) when no mirror exists.
// NOTE(review): both Log.Status and the top-level Status use entry.Status — presumably
// intentional (no per-log status on the entry) — confirm against AttestorEntry.
static AttestationListItemDto MapToListItem(AttestorEntry entry)
{
    return new AttestationListItemDto
    {
        Uuid = entry.RekorUuid,
        Status = entry.Status,
        CreatedAt = entry.CreatedAt.ToString("O"),
        Artifact = new AttestationArtifactDto
        {
            Sha256 = entry.Artifact.Sha256,
            Kind = entry.Artifact.Kind,
            ImageDigest = entry.Artifact.ImageDigest,
            SubjectUri = entry.Artifact.SubjectUri
        },
        Signer = new AttestationSignerDto
        {
            Mode = entry.SignerIdentity.Mode,
            Issuer = entry.SignerIdentity.Issuer,
            Subject = entry.SignerIdentity.SubjectAlternativeName,
            KeyId = entry.SignerIdentity.KeyId
        },
        Log = new AttestationLogDto
        {
            Backend = entry.Log.Backend,
            Url = entry.Log.Url,
            Index = entry.Index,
            Status = entry.Status
        },
        Mirror = entry.Mirror is null ? null : new AttestationLogDto
        {
            Backend = entry.Mirror.Backend,
            Url = entry.Mirror.Url,
            Index = entry.Mirror.Index,
            Status = entry.Mirror.Status
        }
    };
}
// Assembles the submission context from the authenticated principal and the mTLS
// client certificate. Claim values take precedence; the certificate supplies the
// subject fallback and the thumbprint used for mTLS binding.
static SubmissionContext BuildSubmissionContext(ClaimsPrincipal user, X509Certificate2 certificate)
{
    // Small lookup helper so each claim read stays on one line below.
    string? Claim(string type) => user.FindFirst(type)?.Value;

    return new SubmissionContext
    {
        CallerSubject = Claim("sub") ?? certificate.Subject,
        CallerAudience = Claim("aud") ?? string.Empty,
        CallerClientId = Claim("client_id"),
        CallerTenant = Claim("tenant"),
        ClientCertificate = certificate,
        MtlsThumbprint = certificate.Thumbprint
    };
}
// Captures the caller identity (tenant / subject / client) and granted scopes from the
// principal so a bulk verification job can later be authorization-checked against them.
static BulkVerificationJobContext BuildBulkJobContext(ClaimsPrincipal user)
{
    // Keep only non-blank scope claim values, preserving their original order.
    var grantedScopes = new List<string>();
    foreach (var claim in user.FindAll("scope"))
    {
        if (!string.IsNullOrWhiteSpace(claim.Value))
        {
            grantedScopes.Add(claim.Value);
        }
    }

    return new BulkVerificationJobContext
    {
        Tenant = user.FindFirst("tenant")?.Value,
        RequestedBy = user.FindFirst("sub")?.Value,
        ClientId = user.FindFirst("client_id")?.Value,
        Scopes = grantedScopes
    };
}
// A caller may access a bulk job only when both the job's tenant and its original
// requester match the caller's claims; either constraint is waived when the job
// recorded no value for it (ordinal comparison throughout).
static bool IsAuthorizedForJob(BulkVerificationJob job, ClaimsPrincipal user)
{
    // An empty/unset expectation matches anything; otherwise require ordinal equality.
    static bool Matches(string? expected, string? actual) =>
        string.IsNullOrEmpty(expected) || string.Equals(expected, actual, StringComparison.Ordinal);

    return Matches(job.Context.Tenant, user.FindFirst("tenant")?.Value)
        && Matches(job.Context.RequestedBy, user.FindFirst("sub")?.Value);
}
static List<X509Certificate2> LoadClientCertificateAuthorities(string? path)
{
var certificates = new List<X509Certificate2>();
@@ -857,34 +455,6 @@ static IEnumerable<string> ExtractScopes(ClaimsPrincipal user)
}
}
// Returns true when the Content-Type header denotes a JSON payload: either a "*/json"
// media type or a "+json" structured-syntax suffix, case-insensitively.
static bool IsJsonContentType(string? contentType)
{
    if (string.IsNullOrWhiteSpace(contentType))
    {
        return false;
    }

    // Only the media type matters; parameters such as "; charset=utf-8" are ignored.
    var separatorIndex = contentType.IndexOf(';');
    var mediaType = (separatorIndex >= 0 ? contentType[..separatorIndex] : contentType).Trim();

    return mediaType.Length > 0
        && (mediaType.EndsWith("/json", StringComparison.OrdinalIgnoreCase)
            || mediaType.Contains("+json", StringComparison.OrdinalIgnoreCase));
}
// Canonical 415 problem response shared by every JSON-only endpoint in this file;
// the machine-readable "code" extension lets clients branch without parsing the title.
static IResult UnsupportedMediaTypeResult()
{
    var extensions = new Dictionary<string, object?>
    {
        ["code"] = "unsupported_media_type"
    };

    return Results.Problem(
        statusCode: StatusCodes.Status415UnsupportedMediaType,
        title: "Unsupported content type. Submit application/json payloads.",
        extensions: extensions);
}
internal sealed class NoAuthHandler : AuthenticationHandler<AuthenticationSchemeOptions>
{
public const string SchemeName = "NoAuth";
@@ -909,3 +479,7 @@ internal sealed class NoAuthHandler : AuthenticationHandler<AuthenticationScheme
return Task.CompletedTask;
}
}
/// <summary>
/// Empty partial declaration that gives the compiler-generated top-level-statements
/// <c>Program</c> type a public, referenceable name — presumably so integration tests
/// (e.g. <c>WebApplicationFactory&lt;Program&gt;</c>) can target it; confirm against the test project.
/// </summary>
public partial class Program
{
}

View File

@@ -5,7 +5,7 @@
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.AspNetCore.OpenApi" />

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0072-M | DONE | Maintainability audit for StellaOps.Attestor.WebService. |
| AUDIT-0072-T | DONE | Test coverage audit for StellaOps.Attestor.WebService. |
| AUDIT-0072-A | TODO | Pending approval for changes. |
| AUDIT-0072-A | DOING | Addressing WebService audit findings. |

View File

@@ -28,6 +28,7 @@ public sealed class AttestationBundler : IAttestationBundler
private readonly IMerkleTreeBuilder _merkleBuilder;
private readonly ILogger<AttestationBundler> _logger;
private readonly BundlingOptions _options;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Create a new attestation bundler.
@@ -38,7 +39,8 @@ public sealed class AttestationBundler : IAttestationBundler
IMerkleTreeBuilder merkleBuilder,
ILogger<AttestationBundler> logger,
IOptions<BundlingOptions> options,
IOrgKeySigner? orgSigner = null)
IOrgKeySigner? orgSigner = null,
TimeProvider? timeProvider = null)
{
_aggregator = aggregator ?? throw new ArgumentNullException(nameof(aggregator));
_store = store ?? throw new ArgumentNullException(nameof(store));
@@ -46,6 +48,7 @@ public sealed class AttestationBundler : IAttestationBundler
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new BundlingOptions();
_orgSigner = orgSigner;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -60,13 +63,42 @@ public sealed class AttestationBundler : IAttestationBundler
request.PeriodStart,
request.PeriodEnd);
// Collect attestations in deterministic order
var attestations = await CollectAttestationsAsync(request, cancellationToken);
if (attestations.Count == 0)
if (request.PeriodStart > request.PeriodEnd)
{
_logger.LogWarning("No attestations found for the specified period");
throw new InvalidOperationException("No attestations found for the specified period.");
throw new ArgumentException(
"PeriodStart must be less than or equal to PeriodEnd.",
nameof(request));
}
var effectivePeriodStart = request.PeriodStart;
var lookbackDays = _options.Aggregation.LookbackDays;
if (lookbackDays > 0)
{
var lookbackStart = request.PeriodEnd.AddDays(-lookbackDays);
if (effectivePeriodStart < lookbackStart)
{
_logger.LogDebug(
"Clamping period start from {RequestedStart} to {EffectiveStart} to honor lookback window.",
request.PeriodStart,
lookbackStart);
effectivePeriodStart = lookbackStart;
}
}
// Collect attestations in deterministic order
var attestations = await CollectAttestationsAsync(
request with { PeriodStart = effectivePeriodStart },
cancellationToken);
var minimumAttestations = Math.Max(1, _options.Aggregation.MinAttestationsForBundle);
if (attestations.Count < minimumAttestations)
{
_logger.LogWarning(
"Insufficient attestations for bundling. Required {Required}, found {Found}.",
minimumAttestations,
attestations.Count);
throw new InvalidOperationException(
$"Insufficient attestations for bundling. Required {minimumAttestations}, found {attestations.Count}.");
}
_logger.LogInformation("Collected {Count} attestations for bundling", attestations.Count);
@@ -83,8 +115,8 @@ public sealed class AttestationBundler : IAttestationBundler
{
BundleId = bundleId,
Version = "1.0",
CreatedAt = DateTimeOffset.UtcNow,
PeriodStart = request.PeriodStart,
CreatedAt = _timeProvider.GetUtcNow(),
PeriodStart = effectivePeriodStart,
PeriodEnd = request.PeriodEnd,
AttestationCount = attestations.Count,
TenantId = request.TenantId
@@ -104,6 +136,11 @@ public sealed class AttestationBundler : IAttestationBundler
};
// Sign with organization key if requested
if (request.SignWithOrgKey && _orgSigner == null)
{
throw new InvalidOperationException("Organization signer is not configured.");
}
if (request.SignWithOrgKey && _orgSigner != null)
{
bundle = await SignBundleAsync(bundle, request.OrgKeyId, cancellationToken);
@@ -146,14 +183,22 @@ public sealed class AttestationBundler : IAttestationBundler
ArgumentNullException.ThrowIfNull(bundle);
var issues = new List<BundleVerificationIssue>();
var verifiedAt = DateTimeOffset.UtcNow;
var verifiedAt = _timeProvider.GetUtcNow();
// Verify Merkle root
var merkleValid = VerifyMerkleRoot(bundle, issues);
// Verify org signature if present
bool? orgSigValid = null;
if (bundle.OrgSignature != null && _orgSigner != null)
if (bundle.OrgSignature != null && _orgSigner == null)
{
issues.Add(new BundleVerificationIssue(
VerificationIssueSeverity.Critical,
"ORG_SIG_VERIFIER_UNAVAILABLE",
"Organization signature present but no signer is configured for verification."));
orgSigValid = false;
}
else if (bundle.OrgSignature != null && _orgSigner != null)
{
orgSigValid = await VerifyOrgSignatureAsync(bundle, issues, cancellationToken);
}
@@ -236,11 +281,19 @@ public sealed class AttestationBundler : IAttestationBundler
keyId);
// Return bundle with signature and updated metadata
var fingerprint = await GetKeyFingerprintAsync(keyId, cancellationToken);
if (fingerprint == null)
{
_logger.LogWarning(
"Organization key fingerprint not found for key {KeyId}; leaving fingerprint unset.",
keyId);
}
return bundle with
{
Metadata = bundle.Metadata with
{
OrgKeyFingerprint = $"sha256:{ComputeKeyFingerprint(keyId)}"
OrgKeyFingerprint = fingerprint
},
OrgSignature = signature
};
@@ -328,10 +381,17 @@ public sealed class AttestationBundler : IAttestationBundler
return SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
}
private static string ComputeKeyFingerprint(string keyId)
private async Task<string?> GetKeyFingerprintAsync(
string keyId,
CancellationToken cancellationToken)
{
// Simple fingerprint - in production this would use the actual public key
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(keyId));
return Convert.ToHexString(hash[..16]).ToLowerInvariant();
if (_orgSigner == null)
{
return null;
}
var keys = await _orgSigner.ListKeysAsync(cancellationToken) ?? Array.Empty<OrgKeyInfo>();
var match = keys.FirstOrDefault(key => string.Equals(key.KeyId, keyId, StringComparison.Ordinal));
return match?.Fingerprint;
}
}

View File

@@ -120,15 +120,18 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
private readonly IBundleStore _bundleStore;
private readonly BundlingOptions _options;
private readonly ILogger<OfflineKitBundleProvider> _logger;
private readonly TimeProvider _timeProvider;
public OfflineKitBundleProvider(
IBundleStore bundleStore,
IOptions<BundlingOptions> options,
ILogger<OfflineKitBundleProvider> logger)
ILogger<OfflineKitBundleProvider> logger,
TimeProvider? timeProvider = null)
{
_bundleStore = bundleStore ?? throw new ArgumentNullException(nameof(bundleStore));
_options = options?.Value ?? new BundlingOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
@@ -137,7 +140,7 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
OfflineKitExportOptions? options = null,
CancellationToken cancellationToken = default)
{
options ??= new OfflineKitExportOptions();
options = ResolveExportOptions(options);
if (!_options.Export.IncludeInOfflineKit)
{
@@ -147,7 +150,7 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
Bundles = [],
TotalAttestations = 0,
TotalSizeBytes = 0,
ExportedAt = DateTimeOffset.UtcNow
ExportedAt = _timeProvider.GetUtcNow()
};
}
@@ -203,7 +206,7 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
Bundles = exportedBundles,
TotalAttestations = totalAttestations,
TotalSizeBytes = totalSize,
ExportedAt = DateTimeOffset.UtcNow
ExportedAt = _timeProvider.GetUtcNow()
};
}
@@ -212,9 +215,9 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
OfflineKitExportOptions? options = null,
CancellationToken cancellationToken = default)
{
options ??= new OfflineKitExportOptions();
options = ResolveExportOptions(options);
var cutoffDate = DateTimeOffset.UtcNow.AddMonths(-options.MaxAgeMonths);
var cutoffDate = _timeProvider.GetUtcNow().AddMonths(-options.MaxAgeMonths);
var result = new List<BundleListItem>();
string? cursor = null;
@@ -303,4 +306,58 @@ public sealed class OfflineKitBundleProvider : IOfflineKitBundleProvider
return $"bundle-{hash}{extension}{compression}";
}
private OfflineKitExportOptions ResolveExportOptions(OfflineKitExportOptions? options)
{
if (options != null)
{
return options;
}
return new OfflineKitExportOptions
{
MaxAgeMonths = _options.Export.MaxAgeMonths,
Format = ParseFormat(_options.Export.SupportedFormats ?? new List<string>()),
Compression = ParseCompression(_options.Export.Compression),
RequireOrgSignature = false,
TenantId = null
};
}
private static BundleFormat ParseFormat(IList<string> supportedFormats)
{
if (supportedFormats.Count == 0)
{
return BundleFormat.Json;
}
var format = supportedFormats
.FirstOrDefault(value => value.Equals("json", StringComparison.OrdinalIgnoreCase))
?? supportedFormats.FirstOrDefault()
?? "json";
return format.Equals("cbor", StringComparison.OrdinalIgnoreCase)
? BundleFormat.Cbor
: BundleFormat.Json;
}
private static BundleCompression ParseCompression(string? compression)
{
if (string.IsNullOrWhiteSpace(compression))
{
return BundleCompression.None;
}
if (compression.Equals("gzip", StringComparison.OrdinalIgnoreCase))
{
return BundleCompression.Gzip;
}
if (compression.Equals("zstd", StringComparison.OrdinalIgnoreCase))
{
return BundleCompression.Zstd;
}
return BundleCompression.None;
}
}

View File

@@ -164,25 +164,28 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
private readonly IBundleExpiryNotifier? _notifier;
private readonly BundleRetentionOptions _options;
private readonly ILogger<RetentionPolicyEnforcer> _logger;
private readonly TimeProvider _timeProvider;
public RetentionPolicyEnforcer(
IBundleStore bundleStore,
IOptions<BundlingOptions> options,
ILogger<RetentionPolicyEnforcer> logger,
IBundleArchiver? archiver = null,
IBundleExpiryNotifier? notifier = null)
IBundleExpiryNotifier? notifier = null,
TimeProvider? timeProvider = null)
{
_bundleStore = bundleStore ?? throw new ArgumentNullException(nameof(bundleStore));
_options = options?.Value?.Retention ?? new BundleRetentionOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_archiver = archiver;
_notifier = notifier;
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
public async Task<RetentionEnforcementResult> EnforceAsync(CancellationToken cancellationToken = default)
{
var startedAt = DateTimeOffset.UtcNow;
var startedAt = _timeProvider.GetUtcNow();
var failures = new List<BundleEnforcementFailure>();
int evaluated = 0;
int deleted = 0;
@@ -196,7 +199,7 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
return new RetentionEnforcementResult
{
StartedAt = startedAt,
CompletedAt = DateTimeOffset.UtcNow,
CompletedAt = _timeProvider.GetUtcNow(),
BundlesEvaluated = 0,
BundlesDeleted = 0,
BundlesArchived = 0,
@@ -213,10 +216,11 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
// Process bundles in batches
string? cursor = null;
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var notificationCutoff = now.AddDays(_options.NotifyDaysBeforeExpiry);
var gracePeriodCutoff = now.AddDays(-_options.GracePeriodDays);
var expiredNotifications = new List<BundleExpiryNotification>();
var applyOverrides = _options.TenantOverrides.Count > 0 || _options.PredicateTypeOverrides.Count > 0;
do
{
@@ -227,7 +231,29 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
foreach (var bundle in listResult.Bundles)
{
evaluated++;
var expiryDate = CalculateExpiryDate(bundle);
string? tenantId = null;
IReadOnlyList<string>? predicateTypes = null;
if (applyOverrides)
{
var fullBundle = await _bundleStore.GetBundleAsync(bundle.BundleId, cancellationToken);
if (fullBundle == null)
{
failures.Add(new BundleEnforcementFailure(
bundle.BundleId,
"Bundle not found",
"Failed to load bundle metadata for retention overrides."));
continue;
}
tenantId = fullBundle.Metadata.TenantId;
predicateTypes = fullBundle.Attestations
.Select(attestation => attestation.PredicateType)
.Distinct(StringComparer.Ordinal)
.ToList();
}
var expiryDate = CalculateExpiryDate(tenantId, predicateTypes, bundle.CreatedAt);
// Check if bundle has expired
if (expiryDate <= now)
@@ -300,7 +326,7 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
}
}
var completedAt = DateTimeOffset.UtcNow;
var completedAt = _timeProvider.GetUtcNow();
_logger.LogInformation(
"Retention enforcement completed. Evaluated={Evaluated}, Deleted={Deleted}, Archived={Archived}, Marked={Marked}, Approaching={Approaching}, Failed={Failed}",
evaluated, deleted, archived, markedExpired, approachingExpiry, failures.Count);
@@ -324,9 +350,10 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
CancellationToken cancellationToken = default)
{
var notifications = new List<BundleExpiryNotification>();
var now = DateTimeOffset.UtcNow;
var now = _timeProvider.GetUtcNow();
var cutoff = now.AddDays(daysBeforeExpiry);
string? cursor = null;
var applyOverrides = _options.TenantOverrides.Count > 0 || _options.PredicateTypeOverrides.Count > 0;
do
{
@@ -336,7 +363,25 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
foreach (var bundle in listResult.Bundles)
{
var expiryDate = CalculateExpiryDate(bundle);
string? tenantId = null;
IReadOnlyList<string>? predicateTypes = null;
if (applyOverrides)
{
var fullBundle = await _bundleStore.GetBundleAsync(bundle.BundleId, cancellationToken);
if (fullBundle == null)
{
continue;
}
tenantId = fullBundle.Metadata.TenantId;
predicateTypes = fullBundle.Attestations
.Select(attestation => attestation.PredicateType)
.Distinct(StringComparer.Ordinal)
.ToList();
}
var expiryDate = CalculateExpiryDate(tenantId, predicateTypes, bundle.CreatedAt);
if (expiryDate > now && expiryDate <= cutoff)
{
notifications.Add(new BundleExpiryNotification(
@@ -364,17 +409,51 @@ public sealed class RetentionPolicyEnforcer : IRetentionPolicyEnforcer
/// <inheritdoc/>
public DateTimeOffset CalculateExpiryDate(string? tenantId, DateTimeOffset createdAt)
{
int retentionMonths = _options.DefaultMonths;
var retentionMonths = ResolveRetentionMonths(tenantId, null);
return createdAt.AddMonths(retentionMonths);
}
private DateTimeOffset CalculateExpiryDate(
string? tenantId,
IReadOnlyList<string>? predicateTypes,
DateTimeOffset createdAt)
{
var retentionMonths = ResolveRetentionMonths(tenantId, predicateTypes);
return createdAt.AddMonths(retentionMonths);
}
private int ResolveRetentionMonths(
string? tenantId,
IReadOnlyList<string>? predicateTypes)
{
var retentionMonths = ClampRetentionMonths(_options.DefaultMonths);
// Check for tenant-specific override
if (!string.IsNullOrEmpty(tenantId) &&
_options.TenantOverrides.TryGetValue(tenantId, out var tenantMonths))
{
retentionMonths = Math.Max(tenantMonths, _options.MinimumMonths);
retentionMonths = Math.Min(retentionMonths, _options.MaximumMonths);
retentionMonths = ClampRetentionMonths(tenantMonths);
}
return createdAt.AddMonths(retentionMonths);
if (predicateTypes != null && _options.PredicateTypeOverrides.Count > 0)
{
foreach (var predicateType in predicateTypes)
{
if (_options.PredicateTypeOverrides.TryGetValue(predicateType, out var predicateMonths))
{
retentionMonths = Math.Max(retentionMonths, ClampRetentionMonths(predicateMonths));
}
}
}
return retentionMonths;
}
private int ClampRetentionMonths(int months)
{
var clamped = Math.Max(months, _options.MinimumMonths);
return Math.Min(clamped, _options.MaximumMonths);
}
private async Task<(bool Success, BundleEnforcementFailure? Failure)> HandleExpiredBundleAsync(

View File

@@ -6,10 +6,10 @@
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.Bundling</RootNamespace>
<Description>Attestation bundle aggregation and rotation for long-term verification in air-gapped environments.</Description>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
<PackageReference Include="Microsoft.Extensions.Options" />
</ItemGroup>

View File

@@ -1,24 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.Bundling</RootNamespace>
<Description>Attestation bundle aggregation and rotation for long-term verification in air-gapped environments.</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" Version="2.6.2" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\StellaOps.Attestor.Bundle\StellaOps.Attestor.Bundle.csproj" />
</ItemGroup>
</Project>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0047-M | DONE | Maintainability audit for StellaOps.Attestor.Bundling. |
| AUDIT-0047-T | DONE | Test coverage audit for StellaOps.Attestor.Bundling. |
| AUDIT-0047-A | TODO | Pending approval for changes. |
| AUDIT-0047-A | DONE | Applied bundling validation, defaults, and test coverage updates. |

View File

@@ -39,6 +39,7 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
private readonly Func<string?, EnvelopeKey?> _keyResolver;
private readonly IRekorClient? _rekorClient;
private readonly GraphRootAttestorOptions _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<GraphRootAttestor> _logger;
/// <summary>
@@ -56,7 +57,8 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
Func<string?, EnvelopeKey?> keyResolver,
ILogger<GraphRootAttestor> logger,
IRekorClient? rekorClient = null,
IOptions<GraphRootAttestorOptions>? options = null)
IOptions<GraphRootAttestorOptions>? options = null,
TimeProvider? timeProvider = null)
{
_merkleComputer = merkleComputer ?? throw new ArgumentNullException(nameof(merkleComputer));
_signatureService = signatureService ?? throw new ArgumentNullException(nameof(signatureService));
@@ -64,6 +66,7 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_rekorClient = rekorClient;
_options = options?.Value ?? new GraphRootAttestorOptions();
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc />
@@ -91,14 +94,20 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
.OrderBy(x => x, StringComparer.Ordinal)
.ToList();
var normalizedPolicyDigest = NormalizeDigest(request.PolicyDigest);
var normalizedFeedsDigest = NormalizeDigest(request.FeedsDigest);
var normalizedToolchainDigest = NormalizeDigest(request.ToolchainDigest);
var normalizedParamsDigest = NormalizeDigest(request.ParamsDigest);
// 2. Build leaf data for Merkle tree
var leaves = BuildLeaves(
sortedNodeIds,
sortedEdgeIds,
request.PolicyDigest,
request.FeedsDigest,
request.ToolchainDigest,
request.ParamsDigest);
sortedEvidenceIds,
normalizedPolicyDigest,
normalizedFeedsDigest,
normalizedToolchainDigest,
normalizedParamsDigest);
// 3. Compute Merkle root
var rootBytes = _merkleComputer.ComputeRoot(leaves);
@@ -108,7 +117,7 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
_logger.LogDebug("Computed Merkle root: {RootHash}", rootHash);
// 4. Build in-toto statement
var computedAt = DateTimeOffset.UtcNow;
var computedAt = _timeProvider.GetUtcNow();
var attestation = BuildAttestation(
request,
sortedNodeIds,
@@ -116,6 +125,10 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
sortedEvidenceIds,
rootHash,
rootHex,
normalizedPolicyDigest,
normalizedFeedsDigest,
normalizedToolchainDigest,
normalizedParamsDigest,
computedAt);
// 5. Canonicalize the attestation
@@ -129,7 +142,7 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
$"Unable to resolve signing key: {request.SigningKeyId ?? "(default)"}");
}
var signResult = _signatureService.Sign(payload, key, ct);
var signResult = _signatureService.SignDsse(PayloadType, payload, key, ct);
if (!signResult.IsSuccess)
{
throw new InvalidOperationException(
@@ -260,8 +273,8 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
};
// Compute bundle hash
var bundleJson = JsonSerializer.Serialize(EnvDsseEnvelope);
var bundleHash = SHA256.HashData(Encoding.UTF8.GetBytes(bundleJson));
var bundleJson = CanonJson.Canonicalize(EnvDsseEnvelope);
var bundleHash = SHA256.HashData(bundleJson);
return new AttestorSubmissionRequest
{
@@ -303,6 +316,24 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
nodes.Count,
edges.Count);
if (!string.Equals(envelope.PayloadType, PayloadType, StringComparison.Ordinal))
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = $"Unexpected payloadType '{envelope.PayloadType}'."
};
}
if (!TryVerifyEnvelopeSignatures(envelope, ct, out var signatureFailure))
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = signatureFailure ?? "No valid DSSE signatures found."
};
}
// 1. Deserialize attestation from envelope payload
GraphRootAttestation? attestation;
try
@@ -336,15 +367,69 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
.Select(e => e.EdgeId)
.OrderBy(x => x, StringComparer.Ordinal)
.ToList();
var predicateNodeIds = attestation.Predicate.NodeIds?.ToList() ?? [];
var predicateEdgeIds = attestation.Predicate.EdgeIds?.ToList() ?? [];
var predicateEvidenceIds = attestation.Predicate.EvidenceIds?.ToList() ?? [];
if (!SequenceEqual(predicateNodeIds, recomputedNodeIds))
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = "Predicate node IDs do not match provided graph data."
};
}
if (!SequenceEqual(predicateEdgeIds, recomputedEdgeIds))
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = "Predicate edge IDs do not match provided graph data."
};
}
var sortedPredicateEvidenceIds = predicateEvidenceIds
.OrderBy(x => x, StringComparer.Ordinal)
.ToList();
if (!SequenceEqual(predicateEvidenceIds, sortedPredicateEvidenceIds))
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = "Predicate evidence IDs are not in deterministic order."
};
}
string normalizedPolicyDigest;
string normalizedFeedsDigest;
string normalizedToolchainDigest;
string normalizedParamsDigest;
try
{
normalizedPolicyDigest = NormalizeDigest(attestation.Predicate.Inputs.PolicyDigest);
normalizedFeedsDigest = NormalizeDigest(attestation.Predicate.Inputs.FeedsDigest);
normalizedToolchainDigest = NormalizeDigest(attestation.Predicate.Inputs.ToolchainDigest);
normalizedParamsDigest = NormalizeDigest(attestation.Predicate.Inputs.ParamsDigest);
}
catch (ArgumentException ex)
{
return new GraphRootVerificationResult
{
IsValid = false,
FailureReason = $"Invalid predicate digest: {ex.Message}"
};
}
// 3. Build leaves using the same inputs from the attestation
var leaves = BuildLeaves(
recomputedNodeIds,
recomputedEdgeIds,
attestation.Predicate.Inputs.PolicyDigest,
attestation.Predicate.Inputs.FeedsDigest,
attestation.Predicate.Inputs.ToolchainDigest,
attestation.Predicate.Inputs.ParamsDigest);
sortedPredicateEvidenceIds,
normalizedPolicyDigest,
normalizedFeedsDigest,
normalizedToolchainDigest,
normalizedParamsDigest);
// 4. Compute Merkle root
var recomputedRootBytes = _merkleComputer.ComputeRoot(leaves);
@@ -385,13 +470,14 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
private static List<ReadOnlyMemory<byte>> BuildLeaves(
IReadOnlyList<string> sortedNodeIds,
IReadOnlyList<string> sortedEdgeIds,
IReadOnlyList<string> sortedEvidenceIds,
string policyDigest,
string feedsDigest,
string toolchainDigest,
string paramsDigest)
{
var leaves = new List<ReadOnlyMemory<byte>>(
sortedNodeIds.Count + sortedEdgeIds.Count + 4);
sortedNodeIds.Count + sortedEdgeIds.Count + sortedEvidenceIds.Count + 4);
// Add node IDs
foreach (var nodeId in sortedNodeIds)
@@ -405,6 +491,12 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
leaves.Add(Encoding.UTF8.GetBytes(edgeId));
}
// Add evidence IDs
foreach (var evidenceId in sortedEvidenceIds)
{
leaves.Add(Encoding.UTF8.GetBytes(evidenceId));
}
// Add input digests (deterministic order)
leaves.Add(Encoding.UTF8.GetBytes(policyDigest));
leaves.Add(Encoding.UTF8.GetBytes(feedsDigest));
@@ -421,6 +513,10 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
IReadOnlyList<string> sortedEvidenceIds,
string rootHash,
string rootHex,
string policyDigest,
string feedsDigest,
string toolchainDigest,
string paramsDigest,
DateTimeOffset computedAt)
{
var subjects = new List<GraphRootSubject>
@@ -457,10 +553,10 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
EdgeIds = sortedEdgeIds,
Inputs = new GraphInputDigests
{
PolicyDigest = request.PolicyDigest,
FeedsDigest = request.FeedsDigest,
ToolchainDigest = request.ToolchainDigest,
ParamsDigest = request.ParamsDigest
PolicyDigest = policyDigest,
FeedsDigest = feedsDigest,
ToolchainDigest = toolchainDigest,
ParamsDigest = paramsDigest
},
EvidenceIds = sortedEvidenceIds,
CanonVersion = CanonVersion.Current,
@@ -476,13 +572,13 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
var colonIndex = digest.IndexOf(':');
if (colonIndex > 0 && colonIndex < digest.Length - 1)
{
var algorithm = digest[..colonIndex];
var value = digest[(colonIndex + 1)..];
var algorithm = digest[..colonIndex].ToLowerInvariant();
var value = digest[(colonIndex + 1)..].ToLowerInvariant();
return new Dictionary<string, string> { [algorithm] = value };
}
// Assume sha256 if no algorithm prefix
return new Dictionary<string, string> { ["sha256"] = digest };
return new Dictionary<string, string> { ["sha256"] = digest.ToLowerInvariant() };
}
private static string GetToolVersion()
@@ -493,4 +589,104 @@ public sealed class GraphRootAttestor : IGraphRootAttestor
?? "1.0.0";
return version;
}
private bool TryVerifyEnvelopeSignatures(
EnvDsseEnvelope envelope,
CancellationToken ct,
out string? failureReason)
{
if (envelope.Signatures.Count == 0)
{
failureReason = "Envelope does not contain signatures.";
return false;
}
foreach (var signature in envelope.Signatures)
{
ct.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(signature.KeyId))
{
continue;
}
var key = _keyResolver(signature.KeyId);
if (key is null)
{
continue;
}
if (!string.Equals(signature.KeyId, key.KeyId, StringComparison.Ordinal))
{
continue;
}
if (!TryDecodeSignature(signature.Signature, out var signatureBytes))
{
continue;
}
var envelopeSignature = new EnvelopeSignature(signature.KeyId, key.AlgorithmId, signatureBytes);
var verified = _signatureService.VerifyDsse(envelope.PayloadType, envelope.Payload.Span, envelopeSignature, key, ct);
if (verified.IsSuccess)
{
failureReason = null;
return true;
}
}
failureReason = "DSSE signature verification failed.";
return false;
}
private static bool TryDecodeSignature(string signature, out byte[] signatureBytes)
{
try
{
signatureBytes = Convert.FromBase64String(signature);
return signatureBytes.Length > 0;
}
catch (FormatException)
{
signatureBytes = [];
return false;
}
}
private static bool SequenceEqual(IReadOnlyList<string> left, IReadOnlyList<string> right)
{
if (left.Count != right.Count)
{
return false;
}
for (var i = 0; i < left.Count; i++)
{
if (!string.Equals(left[i], right[i], StringComparison.Ordinal))
{
return false;
}
}
return true;
}
private static string NormalizeDigest(string digest)
{
if (string.IsNullOrWhiteSpace(digest))
{
throw new ArgumentException("Digest must be provided.", nameof(digest));
}
var trimmed = digest.Trim();
var colonIndex = trimmed.IndexOf(':');
if (colonIndex > 0 && colonIndex < trimmed.Length - 1)
{
var algorithm = trimmed[..colonIndex].ToLowerInvariant();
var value = trimmed[(colonIndex + 1)..].ToLowerInvariant();
return $"{algorithm}:{value}";
}
return $"sha256:{trimmed.ToLowerInvariant()}";
}
}

View File

@@ -1,3 +1,4 @@
using System;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using StellaOps.Attestor.Envelope;
@@ -18,6 +19,7 @@ public static class GraphRootServiceCollectionExtensions
{
services.TryAddSingleton<IMerkleRootComputer, Sha256MerkleRootComputer>();
services.TryAddSingleton<EnvelopeSignatureService>();
services.TryAddSingleton(TimeProvider.System);
services.TryAddSingleton<IGraphRootAttestor, GraphRootAttestor>();
return services;
@@ -37,14 +39,16 @@ public static class GraphRootServiceCollectionExtensions
services.TryAddSingleton<IMerkleRootComputer, Sha256MerkleRootComputer>();
services.TryAddSingleton<EnvelopeSignatureService>();
services.TryAddSingleton(TimeProvider.System);
services.AddSingleton<IGraphRootAttestor>(sp =>
{
var merkleComputer = sp.GetRequiredService<IMerkleRootComputer>();
var signatureService = sp.GetRequiredService<EnvelopeSignatureService>();
var logger = sp.GetRequiredService<Microsoft.Extensions.Logging.ILogger<GraphRootAttestor>>();
var resolver = keyResolver(sp);
var timeProvider = sp.GetService<TimeProvider>();
return new GraphRootAttestor(merkleComputer, signatureService, resolver, logger);
return new GraphRootAttestor(merkleComputer, signatureService, resolver, logger, timeProvider: timeProvider);
});
return services;

View File

@@ -6,6 +6,7 @@
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.GraphRoot</RootNamespace>
<Description>Graph root attestation service for creating and verifying DSSE attestations of Merkle graph roots.</Description>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -1,27 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.GraphRoot</RootNamespace>
<Description>Graph root attestation service for creating and verifying DSSE attestations of Merkle graph roots.</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Evidence.Core\StellaOps.Evidence.Core.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0053-M | DONE | Maintainability audit for StellaOps.Attestor.GraphRoot. |
| AUDIT-0053-T | DONE | Test coverage audit for StellaOps.Attestor.GraphRoot. |
| AUDIT-0053-A | TODO | Pending approval for changes. |
| AUDIT-0053-A | DONE | Applied audit remediation for graph root attestation. |

View File

@@ -91,9 +91,20 @@ public sealed record OciReference
/// <summary>
/// Gets the full reference string.
/// </summary>
public string FullReference => Tag is not null
? $"{Registry}/{Repository}:{Tag}"
: $"{Registry}/{Repository}@{Digest}";
public string FullReference
{
get
{
var baseRef = $"{Registry}/{Repository}";
if (!string.IsNullOrWhiteSpace(Digest))
{
return $"{baseRef}@{Digest}";
}
var tag = string.IsNullOrWhiteSpace(Tag) ? "latest" : Tag;
return $"{baseRef}:{tag}";
}
}
/// <summary>
/// Parses an OCI reference string.
@@ -102,45 +113,43 @@ public sealed record OciReference
{
ArgumentException.ThrowIfNullOrWhiteSpace(reference);
// Handle digest references: registry/repo@sha256:...
string? digest = null;
var name = reference;
var digestIndex = reference.IndexOf('@');
if (digestIndex > 0)
if (digestIndex >= 0)
{
var beforeDigest = reference[..digestIndex];
var digest = reference[(digestIndex + 1)..];
var (registry, repo) = ParseRegistryAndRepo(beforeDigest);
return new OciReference
{
Registry = registry,
Repository = repo,
Digest = digest
};
}
// Handle tag references: registry/repo:tag
var tagIndex = reference.LastIndexOf(':');
if (tagIndex > 0)
{
var beforeTag = reference[..tagIndex];
var tag = reference[(tagIndex + 1)..];
// Check if this is actually a port number
if (!beforeTag.Contains('/') || tag.Contains('/'))
name = reference[..digestIndex];
digest = reference[(digestIndex + 1)..];
if (string.IsNullOrWhiteSpace(digest))
{
throw new ArgumentException($"Invalid OCI reference: {reference}", nameof(reference));
}
var (registry, repo) = ParseRegistryAndRepo(beforeTag);
return new OciReference
{
Registry = registry,
Repository = repo,
Digest = string.Empty, // Will be resolved
Tag = tag
};
}
throw new ArgumentException($"Invalid OCI reference: {reference}", nameof(reference));
string? tag = null;
var tagIndex = name.LastIndexOf(':');
var slashIndex = name.LastIndexOf('/');
if (tagIndex > slashIndex)
{
tag = name[(tagIndex + 1)..];
name = name[..tagIndex];
}
if (string.IsNullOrWhiteSpace(tag) && string.IsNullOrWhiteSpace(digest))
{
tag = "latest";
}
var (registry, repo) = ParseRegistryAndRepo(name);
return new OciReference
{
Registry = registry,
Repository = repo,
Digest = digest ?? string.Empty,
Tag = tag
};
}
private static (string Registry, string Repo) ParseRegistryAndRepo(string reference)
@@ -148,13 +157,35 @@ public sealed record OciReference
var firstSlash = reference.IndexOf('/');
if (firstSlash < 0)
{
throw new ArgumentException($"Invalid OCI reference: {reference}");
return ("docker.io", NormalizeRepository("docker.io", reference));
}
var registry = reference[..firstSlash];
var repo = reference[(firstSlash + 1)..];
var firstSegment = reference[..firstSlash];
if (IsRegistryHost(firstSegment))
{
var repo = reference[(firstSlash + 1)..];
return (firstSegment, NormalizeRepository(firstSegment, repo));
}
return (registry, repo);
return ("docker.io", NormalizeRepository("docker.io", reference));
}
/// <summary>
/// Decides whether the first path segment of an OCI reference names a registry host
/// rather than a repository component: a segment counts as a host when it contains a
/// dot (domain), a colon (port), or equals "localhost" (case-insensitive).
/// </summary>
private static bool IsRegistryHost(string value)
{
    if (value.Contains('.', StringComparison.Ordinal))
    {
        return true;
    }

    if (value.Contains(':', StringComparison.Ordinal))
    {
        return true;
    }

    return string.Equals(value, "localhost", StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Applies Docker Hub's implicit namespace: a single-segment repository on docker.io
/// (e.g. "nginx") expands to "library/nginx". Other registries and already-namespaced
/// repositories are returned unchanged.
/// </summary>
private static string NormalizeRepository(string registry, string repository)
{
    var isDockerHub = string.Equals(registry, "docker.io", StringComparison.OrdinalIgnoreCase);
    var isSingleSegment = !repository.Contains('/', StringComparison.Ordinal);
    return isDockerHub && isSingleSegment
        ? $"library/{repository}"
        : repository;
}
}

View File

@@ -19,19 +19,16 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
{
private readonly IOciRegistryClient _registryClient;
private readonly ILogger<OrasAttestationAttacher> _logger;
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
private readonly TimeProvider _timeProvider;
public OrasAttestationAttacher(
IOciRegistryClient registryClient,
ILogger<OrasAttestationAttacher> logger)
ILogger<OrasAttestationAttacher> logger,
TimeProvider? timeProvider = null)
{
_registryClient = registryClient ?? throw new ArgumentNullException(nameof(registryClient));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <inheritdoc/>
@@ -46,6 +43,8 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
options ??= new AttachmentOptions();
var predicateType = ResolvePredicateType(attestation);
_logger.LogInformation(
"Attaching attestation to {Registry}/{Repository}@{Digest}",
imageRef.Registry,
@@ -66,18 +65,18 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
{
var existing = await FindExistingAttestationAsync(
imageRef,
attestation.PayloadType,
predicateType,
ct).ConfigureAwait(false);
if (existing is not null)
{
_logger.LogWarning(
"Attestation with predicate type {PredicateType} already exists at {Digest}",
attestation.PayloadType,
predicateType,
TruncateDigest(existing.Digest));
throw new InvalidOperationException(
$"Attestation with predicate type '{attestation.PayloadType}' already exists. " +
$"Attestation with predicate type '{predicateType}' already exists. " +
"Use ReplaceExisting=true to overwrite.");
}
}
@@ -104,7 +103,8 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
ct).ConfigureAwait(false);
// 5. Build manifest with subject reference
var annotations = BuildAnnotations(attestation, options);
var attachedAt = _timeProvider.GetUtcNow();
var annotations = BuildAnnotations(attestation, predicateType, options, attachedAt);
var manifest = new OciManifest
{
SchemaVersion = 2,
@@ -131,7 +131,7 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
Size = attestationBytes.Length,
Annotations = new Dictionary<string, string>
{
[AnnotationKeys.PredicateType] = attestation.PayloadType
[AnnotationKeys.PredicateType] = predicateType
}
}
],
@@ -153,11 +153,17 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
TruncateDigest(imageRef.Digest),
TruncateDigest(manifestDigest));
if (options.RecordInRekor)
{
_logger.LogWarning("RecordInRekor requested but Rekor integration is not configured for OCI attachments.");
}
return new AttachmentResult
{
AttestationDigest = attestationDigest,
AttestationRef = $"{imageRef.Registry}/{imageRef.Repository}@{manifestDigest}",
AttachedAt = DateTimeOffset.UtcNow
AttachedAt = attachedAt,
RekorLogId = null
};
}
@@ -259,7 +265,17 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
return null;
}
var layerDigest = manifest.Layers[0].Digest;
var layer = manifest.Layers.FirstOrDefault(l =>
string.Equals(l.MediaType, MediaTypes.DsseEnvelope, StringComparison.Ordinal));
if (layer is null)
{
_logger.LogWarning(
"Attestation manifest {Digest} has no DSSE envelope layer",
TruncateDigest(target.Digest));
return null;
}
var layerDigest = layer.Digest;
// Fetch the attestation blob
var blobBytes = await _registryClient.FetchBlobAsync(
@@ -305,12 +321,14 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
private static Dictionary<string, string> BuildAnnotations(
DsseEnvelope envelope,
AttachmentOptions options)
string predicateType,
AttachmentOptions options,
DateTimeOffset createdAt)
{
var annotations = new Dictionary<string, string>
var annotations = new Dictionary<string, string>(StringComparer.Ordinal)
{
[AnnotationKeys.Created] = DateTimeOffset.UtcNow.ToString("O"),
[AnnotationKeys.PredicateType] = envelope.PayloadType,
[AnnotationKeys.Created] = createdAt.ToString("O"),
[AnnotationKeys.PredicateType] = predicateType,
[AnnotationKeys.CosignSignature] = "" // Cosign compatibility placeholder
};
@@ -351,7 +369,7 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
private static DsseEnvelope DeserializeEnvelope(ReadOnlyMemory<byte> bytes)
{
// Parse the compact DSSE envelope format
var json = JsonDocument.Parse(bytes);
using var json = JsonDocument.Parse(bytes);
var root = json.RootElement;
var payloadType = root.GetProperty("payloadType").GetString()
@@ -360,7 +378,15 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
var payloadBase64 = root.GetProperty("payload").GetString()
?? throw new InvalidOperationException("Missing payload");
var payload = Convert.FromBase64String(payloadBase64);
byte[] payload;
try
{
payload = Convert.FromBase64String(payloadBase64);
}
catch (FormatException ex)
{
throw new InvalidOperationException("Attestation payload is not valid base64.", ex);
}
var signatures = new List<DsseSignature>();
if (root.TryGetProperty("signatures", out var sigsElement))
@@ -381,6 +407,41 @@ public sealed class OrasAttestationAttacher : IOciAttestationAttacher
return new DsseEnvelope(payloadType, payload, signatures);
}
/// <summary>
/// Resolves the predicate type used for OCI annotations: prefers the in-toto
/// "predicateType" field inside the envelope's JSON payload when present, and
/// falls back to the envelope's payload type otherwise.
/// </summary>
private static string ResolvePredicateType(DsseEnvelope envelope)
{
    return TryGetPredicateType(envelope.Payload.Span, out var predicateType)
        ? predicateType
        : envelope.PayloadType;
}
/// <summary>
/// Attempts to read the in-toto "predicateType" property from a JSON payload.
/// Returns false (with an empty out value) when the payload is not valid JSON,
/// or the property is absent, not a string, or blank.
/// </summary>
private static bool TryGetPredicateType(ReadOnlySpan<byte> payload, out string predicateType)
{
    predicateType = string.Empty;
    try
    {
        using var document = JsonDocument.Parse(payload.ToArray());
        if (!document.RootElement.TryGetProperty("predicateType", out var element)
            || element.ValueKind != JsonValueKind.String)
        {
            return false;
        }

        var candidate = element.GetString();
        if (string.IsNullOrWhiteSpace(candidate))
        {
            return false;
        }

        predicateType = candidate;
        return true;
    }
    catch (JsonException)
    {
        // Malformed payload: caller falls back to the envelope's payload type.
        return false;
    }
}
private static string ComputeDigest(ReadOnlySpan<byte> content)
{
var hash = SHA256.HashData(content);

View File

@@ -6,6 +6,7 @@
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<RootNamespace>StellaOps.Attestor.Oci</RootNamespace>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -7,4 +7,5 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0056-M | DONE | Maintainability audit for StellaOps.Attestor.Oci. |
| AUDIT-0056-T | DONE | Test coverage audit for StellaOps.Attestor.Oci. |
| AUDIT-0056-A | TODO | Pending approval for changes. |
| AUDIT-0056-A | DONE | Applied audit remediation for OCI attacher and references. |
| VAL-SMOKE-001 | DONE | Fixed build issue in Attestor OCI attacher. |

View File

@@ -6,6 +6,7 @@
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.Offline</RootNamespace>
<Description>Offline verification of attestation bundles for air-gapped environments.</Description>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -1,26 +0,0 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<RootNamespace>StellaOps.Attestor.Offline</RootNamespace>
<Description>Offline verification of attestation bundles for air-gapped environments.</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" Version="2.6.2" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.1" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="..\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\StellaOps.Attestor.Bundle\StellaOps.Attestor.Bundle.csproj" />
<ProjectReference Include="..\StellaOps.Attestor.Bundling\StellaOps.Attestor.Bundling.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj" />
</ItemGroup>
</Project>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0058-M | DONE | Maintainability audit for StellaOps.Attestor.Offline. |
| AUDIT-0058-T | DONE | Test coverage audit for StellaOps.Attestor.Offline. |
| AUDIT-0058-A | TODO | Pending approval for changes. |
| AUDIT-0058-A | DOING | Pending approval for changes. |

View File

@@ -50,7 +50,7 @@ public class AuditLogEntity
/// Additional details about the operation.
/// </summary>
[Column("details", TypeName = "jsonb")]
public JsonDocument? Details { get; set; }
public JsonElement? Details { get; set; }
/// <summary>
/// When this log entry was created.

View File

@@ -53,7 +53,7 @@ public class RekorEntryEntity
/// </summary>
[Required]
[Column("inclusion_proof", TypeName = "jsonb")]
public JsonDocument InclusionProof { get; set; } = null!;
public JsonElement InclusionProof { get; set; }
/// <summary>
/// When this record was created.

View File

@@ -16,7 +16,7 @@ function Resolve-RepoRoot {
$repoRoot = Resolve-RepoRoot
$perfDir = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf"
$migrationFile = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations/20251214000001_AddProofChainSchema.sql"
$migrationFile = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations/001_initial_schema.sql"
$seedFile = Join-Path $perfDir "seed.sql"
$queriesFile = Join-Path $perfDir "queries.sql"
$reportFile = Join-Path $repoRoot "docs/db/reports/proofchain-schema-perf-2025-12-17.md"

View File

@@ -57,6 +57,9 @@ public class ProofChainDbContext : DbContext
entity.HasIndex(e => e.Purl).HasDatabaseName("idx_sbom_entries_purl");
entity.HasIndex(e => e.ArtifactDigest).HasDatabaseName("idx_sbom_entries_artifact");
entity.HasIndex(e => e.TrustAnchorId).HasDatabaseName("idx_sbom_entries_anchor");
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
// Unique constraint
entity.HasIndex(e => new { e.BomDigest, e.Purl, e.Version })
@@ -87,6 +90,9 @@ public class ProofChainDbContext : DbContext
.HasDatabaseName("idx_dsse_entry_predicate");
entity.HasIndex(e => e.SignerKeyId).HasDatabaseName("idx_dsse_signer");
entity.HasIndex(e => e.BodyHash).HasDatabaseName("idx_dsse_body_hash");
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
// Unique constraint
entity.HasIndex(e => new { e.EntryId, e.PredicateType, e.BodyHash })
@@ -100,6 +106,9 @@ public class ProofChainDbContext : DbContext
entity.HasIndex(e => e.BundleId).HasDatabaseName("idx_spines_bundle").IsUnique();
entity.HasIndex(e => e.AnchorId).HasDatabaseName("idx_spines_anchor");
entity.HasIndex(e => e.PolicyVersion).HasDatabaseName("idx_spines_policy");
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
entity.HasOne(e => e.Anchor)
.WithMany()
@@ -114,6 +123,12 @@ public class ProofChainDbContext : DbContext
entity.HasIndex(e => e.IsActive)
.HasDatabaseName("idx_trust_anchors_active")
.HasFilter("is_active = TRUE");
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
entity.Property(e => e.UpdatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAddOrUpdate();
});
// RekorEntryEntity configuration
@@ -123,6 +138,9 @@ public class ProofChainDbContext : DbContext
entity.HasIndex(e => e.LogId).HasDatabaseName("idx_rekor_log_id");
entity.HasIndex(e => e.Uuid).HasDatabaseName("idx_rekor_uuid");
entity.HasIndex(e => e.EnvId).HasDatabaseName("idx_rekor_env");
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
entity.HasOne(e => e.Envelope)
.WithOne(e => e.RekorEntry)
@@ -138,6 +156,70 @@ public class ProofChainDbContext : DbContext
entity.HasIndex(e => e.CreatedAt)
.HasDatabaseName("idx_audit_created")
.IsDescending();
entity.Property(e => e.CreatedAt)
.HasDefaultValueSql("NOW()")
.ValueGeneratedOnAdd();
});
}
/// <summary>
/// Normalizes array-valued columns on tracked entities before delegating to
/// EF Core's synchronous save, so persisted rows are canonical.
/// </summary>
public override int SaveChanges()
{
    NormalizeTrackedArrays();
    return base.SaveChanges();
}
/// <summary>
/// Async counterpart of <see cref="SaveChanges"/>: normalizes array-valued
/// columns on tracked entities, then delegates to EF Core's async save.
/// </summary>
public override Task<int> SaveChangesAsync(CancellationToken cancellationToken = default)
{
    NormalizeTrackedArrays();
    return base.SaveChangesAsync(cancellationToken);
}
/// <summary>
/// Rewrites array-valued columns on pending (Added/Modified) entities into their
/// canonical form so that what is persisted does not depend on input ordering.
/// </summary>
private void NormalizeTrackedArrays()
{
    foreach (var spine in ChangeTracker.Entries<SpineEntity>())
    {
        if (spine.State != EntityState.Added && spine.State != EntityState.Modified)
        {
            continue;
        }

        spine.Entity.EvidenceIds = NormalizeEvidenceIds(spine.Entity.EvidenceIds);
    }

    foreach (var anchor in ChangeTracker.Entries<TrustAnchorEntity>())
    {
        if (anchor.State != EntityState.Added && anchor.State != EntityState.Modified)
        {
            continue;
        }

        anchor.Entity.AllowedKeyIds = NormalizeKeyIds(anchor.Entity.AllowedKeyIds);
    }
}
/// <summary>
/// Canonicalizes an evidence-id array: trims entries, drops blanks, removes
/// duplicates (ordinal), and sorts ordinally. An empty input is returned as-is.
/// </summary>
private static string[] NormalizeEvidenceIds(string[] evidenceIds)
{
    if (evidenceIds.Length == 0)
    {
        return evidenceIds;
    }

    // A sorted set with an ordinal comparer gives distinct + ordered in one pass.
    var unique = new SortedSet<string>(StringComparer.Ordinal);
    foreach (var raw in evidenceIds)
    {
        var trimmed = raw.Trim();
        if (trimmed.Length > 0)
        {
            unique.Add(trimmed);
        }
    }

    return unique.ToArray();
}
/// <summary>
/// Canonicalizes an allowed-key-id array: trims entries, drops blanks, removes
/// case-insensitive duplicates (first occurrence wins), and sorts
/// case-insensitively. An empty input is returned as-is.
/// </summary>
private static string[] NormalizeKeyIds(string[] keyIds)
{
    if (keyIds.Length == 0)
    {
        return keyIds;
    }

    // OrdinalIgnoreCase set: the first casing seen for a key id is retained.
    var unique = new SortedSet<string>(StringComparer.OrdinalIgnoreCase);
    foreach (var raw in keyIds)
    {
        var trimmed = raw.Trim();
        if (trimmed.Length > 0)
        {
            unique.Add(trimmed);
        }
    }

    return unique.ToArray();
}
}

View File

@@ -59,7 +59,8 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
private readonly ILogger<TrustAnchorMatcher> _logger;
// Cache compiled regex patterns
private readonly Dictionary<string, Regex> _patternCache = new();
private const int MaxRegexCacheSize = 1024;
private readonly Dictionary<string, Regex> _patternCache = new(StringComparer.OrdinalIgnoreCase);
private readonly Lock _cacheLock = new();
public TrustAnchorMatcher(
@@ -92,7 +93,7 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
{
var specificity = CalculateSpecificity(anchor.PurlPattern);
if (bestMatch == null || specificity > bestMatch.Specificity)
if (IsBetterMatch(anchor, specificity, bestMatch))
{
bestMatch = new TrustAnchorMatchResult
{
@@ -190,6 +191,11 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
var regexPattern = ConvertGlobToRegex(pattern);
var regex = new Regex(regexPattern, RegexOptions.IgnoreCase | RegexOptions.Compiled);
if (_patternCache.Count >= MaxRegexCacheSize)
{
_patternCache.Clear();
}
_patternCache[pattern] = regex;
return regex;
}
@@ -284,4 +290,36 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
}
return true;
}
/// <summary>
/// Deterministic tie-break for trust-anchor selection. Higher specificity wins;
/// on a tie, the longer pattern wins; then case-insensitive pattern order; then
/// anchor-id order — so the chosen anchor is stable across runs.
/// </summary>
/// <param name="candidate">Anchor whose pattern matched the purl.</param>
/// <param name="specificity">Precomputed specificity score for <paramref name="candidate"/>.</param>
/// <param name="bestMatch">Best match found so far, or null when none yet.</param>
/// <returns>True when <paramref name="candidate"/> should replace <paramref name="bestMatch"/>.</returns>
private static bool IsBetterMatch(
    TrustAnchorEntity candidate,
    int specificity,
    TrustAnchorMatchResult? bestMatch)
{
    // First candidate always wins.
    if (bestMatch == null)
    {
        return true;
    }

    if (specificity != bestMatch.Specificity)
    {
        return specificity > bestMatch.Specificity;
    }

    // Equal specificity: prefer the longer (more literal) pattern.
    var candidatePattern = candidate.PurlPattern ?? string.Empty;
    var bestPattern = bestMatch.MatchedPattern ?? string.Empty;

    if (candidatePattern.Length != bestPattern.Length)
    {
        return candidatePattern.Length > bestPattern.Length;
    }

    var patternCompare = string.Compare(candidatePattern, bestPattern, StringComparison.OrdinalIgnoreCase);
    if (patternCompare != 0)
    {
        return patternCompare < 0;
    }

    // NOTE(review): if AnchorId is a string, CompareTo is culture-sensitive —
    // confirm ordinal semantics are acceptable for determinism.
    return candidate.AnchorId.CompareTo(bestMatch.Anchor.AnchorId) < 0;
}
}

View File

@@ -7,6 +7,7 @@
<LangVersion>preview</LangVersion>
<RootNamespace>StellaOps.Attestor.Persistence</RootNamespace>
<Description>Proof chain persistence layer with Entity Framework Core and PostgreSQL support.</Description>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0060-M | DONE | Maintainability audit for StellaOps.Attestor.Persistence. |
| AUDIT-0060-T | DONE | Test coverage audit for StellaOps.Attestor.Persistence. |
| AUDIT-0060-A | TODO | Pending approval for changes. |
| AUDIT-0060-A | DONE | Applied defaults, normalization, deterministic matching, perf script, tests. |

View File

@@ -19,10 +19,17 @@ public sealed class AuditHashLogger
{
private readonly ILogger<AuditHashLogger> _logger;
private readonly bool _enableDetailedLogging;
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates a logger that stamps audit records with the system clock.
/// Delegates to the clock-injecting constructor with <see cref="TimeProvider.System"/>.
/// </summary>
public AuditHashLogger(ILogger<AuditHashLogger> logger, bool enableDetailedLogging = false)
    : this(logger, TimeProvider.System, enableDetailedLogging)
{
}
/// <summary>
/// Creates a logger with an injectable clock so timestamps are deterministic in tests.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="logger"/> or <paramref name="timeProvider"/> is null.</exception>
public AuditHashLogger(ILogger<AuditHashLogger> logger, TimeProvider timeProvider, bool enableDetailedLogging = false)
{
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    _enableDetailedLogging = enableDetailedLogging;
}
@@ -91,7 +98,7 @@ public sealed class AuditHashLogger
RawSizeBytes = rawBytes.Length,
CanonicalSizeBytes = canonicalBytes.Length,
HashesMatch = rawHash.Equals(canonicalHash, StringComparison.Ordinal),
Timestamp = DateTimeOffset.UtcNow,
Timestamp = _timeProvider.GetUtcNow(),
CorrelationId = correlationId
};

View File

@@ -1,5 +1,6 @@
namespace StellaOps.Attestor.ProofChain.Generators;
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Canonical.Json;
@@ -26,11 +27,33 @@ public sealed class BackportProofGenerator
string fixedVersion,
DateTimeOffset advisoryDate,
JsonDocument advisoryData)
{
return FromDistroAdvisory(
cveId,
packagePurl,
advisorySource,
advisoryId,
fixedVersion,
advisoryDate,
advisoryData,
TimeProvider.System);
}
public static ProofBlob FromDistroAdvisory(
string cveId,
string packagePurl,
string advisorySource,
string advisoryId,
string fixedVersion,
DateTimeOffset advisoryDate,
JsonDocument advisoryData,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:distro:{advisorySource}:{advisoryId}";
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(advisoryData));
var dataElement = advisoryData.RootElement.Clone();
var dataHash = ComputeDataHash(advisoryData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
@@ -38,7 +61,7 @@ public sealed class BackportProofGenerator
Type = EvidenceType.DistroAdvisory,
Source = advisorySource,
Timestamp = advisoryDate,
Data = advisoryData,
Data = dataElement,
DataHash = dataHash
};
@@ -47,12 +70,12 @@ public sealed class BackportProofGenerator
ProofId = "", // Will be computed
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "distro_advisory_tier1",
Confidence = 0.98, // Highest confidence - authoritative source
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -66,12 +89,22 @@ public sealed class BackportProofGenerator
string packagePurl,
ChangelogEntry changelogEntry,
string changelogSource)
{
return FromChangelog(cveId, packagePurl, changelogEntry, changelogSource, TimeProvider.System);
}
public static ProofBlob FromChangelog(
string cveId,
string packagePurl,
ChangelogEntry changelogEntry,
string changelogSource,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:changelog:{changelogSource}:{changelogEntry.Version}";
var changelogData = JsonDocument.Parse(JsonSerializer.Serialize(changelogEntry));
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(changelogData));
var changelogData = SerializeToElement(changelogEntry, out var changelogBytes);
var dataHash = ComputeDataHash(changelogBytes);
var evidence = new ProofEvidence
{
@@ -88,12 +121,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "changelog_mention_tier2",
Confidence = changelogEntry.Confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -106,12 +139,21 @@ public sealed class BackportProofGenerator
string cveId,
string packagePurl,
PatchHeaderParseResult patchResult)
{
return FromPatchHeader(cveId, packagePurl, patchResult, TimeProvider.System);
}
public static ProofBlob FromPatchHeader(
string cveId,
string packagePurl,
PatchHeaderParseResult patchResult,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:patch_header:{patchResult.PatchFilePath}";
var patchData = JsonDocument.Parse(JsonSerializer.Serialize(patchResult));
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(patchData));
var patchData = SerializeToElement(patchResult, out var patchBytes);
var dataHash = ComputeDataHash(patchBytes);
var evidence = new ProofEvidence
{
@@ -128,12 +170,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "patch_header_tier3",
Confidence = patchResult.Confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -147,12 +189,22 @@ public sealed class BackportProofGenerator
string packagePurl,
PatchSignature patchSig,
bool exactMatch)
{
return FromPatchSignature(cveId, packagePurl, patchSig, exactMatch, TimeProvider.System);
}
public static ProofBlob FromPatchSignature(
string cveId,
string packagePurl,
PatchSignature patchSig,
bool exactMatch,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:hunksig:{patchSig.CommitSha}";
var patchData = JsonDocument.Parse(JsonSerializer.Serialize(patchSig));
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(patchData));
var patchData = SerializeToElement(patchSig, out var patchBytes);
var dataHash = ComputeDataHash(patchBytes);
var evidence = new ProofEvidence
{
@@ -172,12 +224,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = exactMatch ? "hunksig_exact_tier3" : "hunksig_fuzzy_tier3",
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -193,19 +245,39 @@ public sealed class BackportProofGenerator
string fingerprintValue,
JsonDocument fingerprintData,
double confidence)
{
return FromBinaryFingerprint(
cveId,
packagePurl,
fingerprintMethod,
fingerprintValue,
fingerprintData,
confidence,
TimeProvider.System);
}
public static ProofBlob FromBinaryFingerprint(
string cveId,
string packagePurl,
string fingerprintMethod,
string fingerprintValue,
JsonDocument fingerprintData,
double confidence,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:binary:{fingerprintMethod}:{fingerprintValue}";
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(fingerprintData));
var dataElement = fingerprintData.RootElement.Clone();
var dataHash = ComputeDataHash(fingerprintData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.BinaryFingerprint,
Source = fingerprintMethod,
Timestamp = DateTimeOffset.UtcNow,
Data = fingerprintData,
Timestamp = timeProvider.GetUtcNow(),
Data = dataElement,
DataHash = dataHash
};
@@ -214,12 +286,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = $"binary_{fingerprintMethod}_tier4",
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -232,6 +304,15 @@ public sealed class BackportProofGenerator
string cveId,
string packagePurl,
IReadOnlyList<ProofEvidence> evidences)
{
return CombineEvidence(cveId, packagePurl, evidences, TimeProvider.System);
}
public static ProofBlob CombineEvidence(
string cveId,
string packagePurl,
IReadOnlyList<ProofEvidence> evidences,
TimeProvider timeProvider)
{
if (evidences.Count == 0)
{
@@ -251,12 +332,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = evidences,
Method = method,
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -270,19 +351,30 @@ public sealed class BackportProofGenerator
string packagePurl,
string reason,
JsonDocument versionData)
{
return NotAffected(cveId, packagePurl, reason, versionData, TimeProvider.System);
}
public static ProofBlob NotAffected(
string cveId,
string packagePurl,
string reason,
JsonDocument versionData,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:version_comparison:{cveId}";
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(versionData));
var dataElement = versionData.RootElement.Clone();
var dataHash = ComputeDataHash(versionData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.VersionComparison,
Source = "version_comparison",
Timestamp = DateTimeOffset.UtcNow,
Data = versionData,
Timestamp = timeProvider.GetUtcNow(),
Data = dataElement,
DataHash = dataHash
};
@@ -291,12 +383,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.NotAffected,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = reason,
Confidence = 0.95,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -309,6 +401,15 @@ public sealed class BackportProofGenerator
string cveId,
string packagePurl,
string reason)
{
return Vulnerable(cveId, packagePurl, reason, TimeProvider.System);
}
public static ProofBlob Vulnerable(
string cveId,
string packagePurl,
string reason,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
@@ -318,12 +419,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.Vulnerable,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = Array.Empty<ProofEvidence>(),
Method = reason,
Confidence = 0.85, // Lower confidence - absence of evidence is not evidence of absence
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -337,6 +438,16 @@ public sealed class BackportProofGenerator
string packagePurl,
string reason,
IReadOnlyList<ProofEvidence> partialEvidences)
{
return Unknown(cveId, packagePurl, reason, partialEvidences, TimeProvider.System);
}
public static ProofBlob Unknown(
string cveId,
string packagePurl,
string reason,
IReadOnlyList<ProofEvidence> partialEvidences,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
@@ -345,12 +456,12 @@ public sealed class BackportProofGenerator
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.Unknown,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = partialEvidences,
Method = reason,
Confidence = 0.0,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId()
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
@@ -418,9 +529,27 @@ public sealed class BackportProofGenerator
return $"multi_tier_combined_{types.Count}";
}
private static string GenerateSnapshotId()
private static string GenerateSnapshotId(TimeProvider timeProvider)
{
// Snapshot ID format: YYYYMMDD-HHMMSS-UTC
return DateTimeOffset.UtcNow.ToString("yyyyMMdd-HHmmss") + "-UTC";
return timeProvider.GetUtcNow().ToString("yyyyMMdd-HHmmss") + "-UTC";
}
/// <summary>
/// Serializes a value to UTF-8 JSON, returning a detached <see cref="JsonElement"/>
/// (safe to use after the backing document is disposed) plus the raw bytes for hashing.
/// </summary>
private static JsonElement SerializeToElement<T>(T value, out byte[] jsonBytes)
{
    var utf8 = JsonSerializer.SerializeToUtf8Bytes(value);
    jsonBytes = utf8;
    using (var document = JsonDocument.Parse(utf8))
    {
        // Clone detaches the element from the short-lived document.
        return document.RootElement.Clone();
    }
}
// Canonicalizes the already-parsed JSON bytes via CanonJson and returns the
// sha256-prefixed digest used as the evidence DataHash.
private static string ComputeDataHash(ReadOnlySpan<byte> jsonBytes)
{
    return CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(jsonBytes));
}
/// <summary>
/// Convenience overload: encodes the JSON text as UTF-8 and delegates to the
/// span-based hash overload.
/// </summary>
private static string ComputeDataHash(string json)
{
    return ComputeDataHash(Encoding.UTF8.GetBytes(json));
}
}

View File

@@ -20,6 +20,17 @@ public sealed class BinaryFingerprintEvidenceGenerator
{
private const string ToolId = "stellaops.binaryindex";
private const string ToolVersion = "1.0.0";
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates a generator that timestamps evidence with the system clock.
/// </summary>
public BinaryFingerprintEvidenceGenerator()
    : this(TimeProvider.System)
{
}
/// <summary>
/// Creates a generator with an injectable clock so evidence timestamps and
/// snapshot ids are deterministic in tests.
/// </summary>
/// <exception cref="ArgumentNullException">When <paramref name="timeProvider"/> is null.</exception>
public BinaryFingerprintEvidenceGenerator(TimeProvider timeProvider)
{
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <summary>
/// Generate a proof segment from binary vulnerability findings.
@@ -28,8 +39,8 @@ public sealed class BinaryFingerprintEvidenceGenerator
{
ArgumentNullException.ThrowIfNull(predicate);
var predicateJson = JsonSerializer.SerializeToDocument(predicate, GetJsonOptions());
var dataHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(predicateJson));
var predicateJson = SerializeToElement(predicate, GetJsonOptions(), out var predicateBytes);
var dataHash = CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(predicateBytes));
// Create subject ID from binary key and scan context
var subjectId = $"binary:{predicate.BinaryIdentity.BinaryKey}";
@@ -42,15 +53,15 @@ public sealed class BinaryFingerprintEvidenceGenerator
var evidences = new List<ProofEvidence>();
foreach (var match in predicate.Matches)
{
var matchData = JsonSerializer.SerializeToDocument(match, GetJsonOptions());
var matchHash = CanonJson.Sha256Prefixed(CanonJson.Canonicalize(matchData));
var matchData = SerializeToElement(match, GetJsonOptions(), out var matchBytes);
var matchHash = CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(matchBytes));
evidences.Add(new ProofEvidence
{
EvidenceId = $"evidence:binary:{predicate.BinaryIdentity.BinaryKey}:{match.CveId}",
Type = EvidenceType.BinaryFingerprint,
Source = match.Method,
Timestamp = DateTimeOffset.UtcNow,
Timestamp = _timeProvider.GetUtcNow(),
Data = matchData,
DataHash = matchHash
});
@@ -65,7 +76,7 @@ public sealed class BinaryFingerprintEvidenceGenerator
ProofId = "", // Will be computed by ProofHashing.WithHash
SubjectId = subjectId,
Type = proofType,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
Evidences = evidences,
Method = "binary_fingerprint_evidence",
Confidence = confidence,
@@ -176,9 +187,19 @@ public sealed class BinaryFingerprintEvidenceGenerator
return totalWeight > 0 ? Math.Min(weightedSum / totalWeight, 0.98) : 0.0;
}
private static string GenerateSnapshotId()
private string GenerateSnapshotId()
{
return DateTimeOffset.UtcNow.ToString("yyyyMMdd-HHmmss") + "-UTC";
return _timeProvider.GetUtcNow().ToString("yyyyMMdd-HHmmss") + "-UTC";
}
/// <summary>
/// Serializes <paramref name="value"/> using the supplied <paramref name="options"/>
/// and returns a detached <see cref="JsonElement"/>; <paramref name="jsonBytes"/>
/// receives the exact UTF-8 bytes so callers can canonicalize/hash the same payload.
/// </summary>
private static JsonElement SerializeToElement<T>(
    T value,
    JsonSerializerOptions options,
    out byte[] jsonBytes)
{
    var serialized = JsonSerializer.SerializeToUtf8Bytes(value, options);
    jsonBytes = serialized;

    using var parsed = JsonDocument.Parse(serialized);
    // Clone so the element outlives the disposed JsonDocument.
    var root = parsed.RootElement;
    return root.Clone();
}
private static JsonSerializerOptions GetJsonOptions()

View File

@@ -111,24 +111,26 @@ public sealed class PredicateSchemaValidator : IJsonSchemaValidator
}
/// <inheritdoc />
public async Task<SchemaValidationResult> ValidatePredicateAsync(
public Task<SchemaValidationResult> ValidatePredicateAsync(
string json,
string predicateType,
CancellationToken ct = default)
{
ct.ThrowIfCancellationRequested();
if (!HasSchema(predicateType))
{
return SchemaValidationResult.Failure(new SchemaValidationError
return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
{
Path = "/",
Message = $"No schema registered for predicate type: {predicateType}",
Keyword = "predicateType"
});
}));
}
try
{
var document = JsonDocument.Parse(json);
using var document = JsonDocument.Parse(json);
// TODO: Implement actual JSON Schema validation
// For now, do basic structural checks
@@ -174,27 +176,29 @@ public sealed class PredicateSchemaValidator : IJsonSchemaValidator
}
return errors.Count > 0
? SchemaValidationResult.Failure(errors.ToArray())
: SchemaValidationResult.Success();
? Task.FromResult(SchemaValidationResult.Failure(errors.ToArray()))
: Task.FromResult(SchemaValidationResult.Success());
}
catch (JsonException ex)
{
return SchemaValidationResult.Failure(new SchemaValidationError
return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
{
Path = "/",
Message = $"Invalid JSON: {ex.Message}",
Keyword = "format"
});
}));
}
}
/// <inheritdoc />
public async Task<SchemaValidationResult> ValidateStatementAsync<T>(
public Task<SchemaValidationResult> ValidateStatementAsync<T>(
T statement,
CancellationToken ct = default) where T : Statements.InTotoStatement
{
ct.ThrowIfCancellationRequested();
var json = System.Text.Json.JsonSerializer.Serialize(statement);
return await ValidatePredicateAsync(json, statement.PredicateType, ct);
return ValidatePredicateAsync(json, statement.PredicateType, ct);
}
/// <inheritdoc />

View File

@@ -197,53 +197,119 @@ public sealed class Rfc8785JsonCanonicalizer : IJsonCanonicalizer
private static void WriteNumber(Utf8JsonWriter writer, JsonElement element)
{
var raw = element.GetRawText();
if (!double.TryParse(raw, NumberStyles.Float, CultureInfo.InvariantCulture, out var value) ||
double.IsNaN(value) ||
double.IsInfinity(value))
writer.WriteRawValue(NormalizeNumberString(raw), skipInputValidation: true);
}
private static string NormalizeNumberString(string raw)
{
if (string.IsNullOrWhiteSpace(raw))
{
throw new FormatException("Invalid JSON number.");
}
var index = 0;
var negative = raw[index] == '-';
if (negative)
{
index++;
}
var intStart = index;
while (index < raw.Length && char.IsDigit(raw[index]))
{
index++;
}
if (index == intStart)
{
throw new FormatException($"Invalid JSON number: '{raw}'.");
}
if (value == 0d)
var intPart = raw[intStart..index];
var fracPart = string.Empty;
if (index < raw.Length && raw[index] == '.')
{
writer.WriteRawValue("0", skipInputValidation: true);
return;
index++;
var fracStart = index;
while (index < raw.Length && char.IsDigit(raw[index]))
{
index++;
}
if (index == fracStart)
{
throw new FormatException($"Invalid JSON number: '{raw}'.");
}
fracPart = raw[fracStart..index];
}
var formatted = value.ToString("R", CultureInfo.InvariantCulture);
writer.WriteRawValue(NormalizeExponent(formatted), skipInputValidation: true);
var exponent = 0;
if (index < raw.Length && (raw[index] == 'e' || raw[index] == 'E'))
{
index++;
var expNegative = false;
if (index < raw.Length && (raw[index] == '+' || raw[index] == '-'))
{
expNegative = raw[index] == '-';
index++;
}
var expStart = index;
while (index < raw.Length && char.IsDigit(raw[index]))
{
index++;
}
if (index == expStart)
{
throw new FormatException($"Invalid JSON number: '{raw}'.");
}
var expValue = int.Parse(raw[expStart..index], CultureInfo.InvariantCulture);
exponent = expNegative ? -expValue : expValue;
}
if (index != raw.Length)
{
throw new FormatException($"Invalid JSON number: '{raw}'.");
}
var digits = (intPart + fracPart).TrimStart('0');
if (digits.Length == 0)
{
return "0";
}
var decimalExponent = exponent - fracPart.Length;
var normalized = decimalExponent >= 0
? digits + new string('0', decimalExponent)
: InsertDecimalPoint(digits, decimalExponent);
return negative ? "-" + normalized : normalized;
}
private static string NormalizeExponent(string formatted)
private static string InsertDecimalPoint(string digits, int decimalExponent)
{
var e = formatted.IndexOfAny(['E', 'e']);
if (e < 0)
var position = digits.Length + decimalExponent;
if (position > 0)
{
return formatted;
var integerPart = digits[..position].TrimStart('0');
if (integerPart.Length == 0)
{
integerPart = "0";
}
var fractionalPart = digits[position..].TrimEnd('0');
if (fractionalPart.Length == 0)
{
return integerPart;
}
return $"{integerPart}.{fractionalPart}";
}
var mantissa = formatted[..e];
var exponent = formatted[(e + 1)..];
if (string.IsNullOrWhiteSpace(exponent))
{
return mantissa;
}
var sign = string.Empty;
if (exponent[0] is '+' or '-')
{
sign = exponent[0] == '-' ? "-" : string.Empty;
exponent = exponent[1..];
}
exponent = exponent.TrimStart('0');
if (exponent.Length == 0)
{
// 1e0 -> 1
return mantissa;
}
return $"{mantissa}e{sign}{exponent}";
var zeros = new string('0', -position);
var fraction = (zeros + digits).TrimEnd('0');
return $"0.{fraction}";
}
}

View File

@@ -30,13 +30,14 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
throw new ArgumentException("At least one leaf is required.", nameof(leafValues));
}
var sortedLeaves = SortLeaves(leafValues);
var levels = new List<IReadOnlyList<byte[]>>();
// Level 0: Hash all leaf values
var leafHashes = new List<byte[]>(PadToPowerOfTwo(leafValues.Count));
for (var i = 0; i < leafValues.Count; i++)
var leafHashes = new List<byte[]>(PadToPowerOfTwo(sortedLeaves.Count));
for (var i = 0; i < sortedLeaves.Count; i++)
{
leafHashes.Add(SHA256.HashData(leafValues[i].Span));
leafHashes.Add(SHA256.HashData(sortedLeaves[i].Span));
}
// Pad with duplicate of last leaf hash (deterministic)
@@ -149,6 +150,49 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
return currentHash.AsSpan().SequenceEqual(expectedRoot);
}
/// <summary>
/// Returns the leaves in deterministic byte-lexicographic order. Ties are broken by
/// original position, which makes the overall ordering stable for duplicate leaves.
/// </summary>
private static IReadOnlyList<ReadOnlyMemory<byte>> SortLeaves(IReadOnlyList<ReadOnlyMemory<byte>> leaves)
{
    if (leaves.Count <= 1)
    {
        return leaves;
    }

    // Pair each leaf with its original index so equal leaves keep input order.
    var entries = new (ReadOnlyMemory<byte> Value, int Index)[leaves.Count];
    for (var i = 0; i < entries.Length; i++)
    {
        entries[i] = (leaves[i], i);
    }

    Array.Sort(entries, static (a, b) =>
    {
        var byValue = CompareBytes(a.Value.Span, b.Value.Span);
        return byValue != 0 ? byValue : a.Index.CompareTo(b.Index);
    });

    var ordered = new ReadOnlyMemory<byte>[entries.Length];
    for (var i = 0; i < entries.Length; i++)
    {
        ordered[i] = entries[i].Value;
    }

    return ordered;
}
/// <summary>
/// Lexicographic comparison of two byte spans: element-by-element, with the shorter
/// span ordering first when one is a prefix of the other. Callers rely only on the
/// sign of the result.
/// </summary>
private static int CompareBytes(ReadOnlySpan<byte> left, ReadOnlySpan<byte> right)
{
    // The BCL already implements exactly this ordering (and uses vectorized
    // comparisons internally), so prefer it over a hand-rolled loop.
    return left.SequenceCompareTo(right);
}
private static int PadToPowerOfTwo(int count)
{
var power = 1;
@@ -168,4 +212,3 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
}
}

View File

@@ -70,7 +70,7 @@ public sealed record ProofEvidence
public required EvidenceType Type { get; init; }
public required string Source { get; init; }
public required DateTimeOffset Timestamp { get; init; }
public required JsonDocument Data { get; init; }
public required JsonElement Data { get; init; }
public required string DataHash { get; init; }
}

View File

@@ -4,6 +4,7 @@
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0062-M | DONE | Maintainability audit for StellaOps.Attestor.ProofChain. |
| AUDIT-0062-T | DONE | Test coverage audit for StellaOps.Attestor.ProofChain. |
| AUDIT-0062-A | TODO | Pending approval for changes. |
| AUDIT-0062-A | DONE | Applied determinism, time providers, canonicalization, schema validation, tests. |

Some files were not shown because too many files have changed in this diff Show More