Add Authority Advisory AI and API Lifecycle Configuration

- Introduced AuthorityAdvisoryAiOptions and related classes for managing advisory AI configurations, including remote inference options and tenant-specific settings.
- Added AuthorityApiLifecycleOptions to control API lifecycle settings, including legacy OAuth endpoint configurations.
- Implemented validation and normalization methods for both the advisory AI and API lifecycle options so invalid settings are rejected and values are normalized consistently.
- Created AuthorityNotificationsOptions and its related classes for managing notification settings, including ack tokens, webhooks, and escalation options.
- Developed IssuerDirectoryClient and related models for interacting with the issuer directory service, including caching mechanisms and HTTP client configurations.
- Added dependency-injection support for the Issuer Directory Client through ServiceCollectionExtensions (a registration sketch follows this list).
- Updated project file to include necessary package references for the new Issuer Directory Client library.
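
For orientation, a minimal sketch of the typed-HttpClient registration pattern the new ServiceCollectionExtensions presumably wrap. The extension method body and the IssuerDirectoryClient constructor shape shown here are illustrative assumptions, not the committed implementation.

// Hedged sketch only: illustrates the typed-HttpClient DI pattern the new
// ServiceCollectionExtensions likely wrap; names and shapes below are assumptions.
using System;
using System.Net.Http;
using Microsoft.Extensions.DependencyInjection;

public sealed class IssuerDirectoryClient
{
    private readonly HttpClient http;

    // Illustrative constructor; the committed client also carries caching and options.
    public IssuerDirectoryClient(HttpClient http) => this.http = http;
}

public static class IssuerDirectoryServiceCollectionExtensions
{
    public static IServiceCollection AddIssuerDirectoryClient(this IServiceCollection services, Uri baseAddress)
    {
        // Typed HttpClient registration; auth handlers and response caching would hang off this pipeline.
        services.AddHttpClient<IssuerDirectoryClient>(client => client.BaseAddress = baseAddress);
        return services;
    }
}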
This commit is contained in: master
2025-11-02 13:40:38 +02:00
parent 66cb6c4b8a
commit f98cea3bcf
516 changed files with 68157 additions and 24754 deletions

View File

@@ -29,15 +29,32 @@ components:
password:
tokenUrl: /token
refreshUrl: /token
scopes:
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
authority.audit.read: Read Authority audit logs.
authority.clients.manage: Manage Authority client registrations.
authority.users.manage: Manage Authority users.
authority:tenants.read: Read the Authority tenant catalog.
concelier.jobs.trigger: Trigger Concelier aggregation jobs.
scopes:
attestor.write: Submit attestation bundles and Rekor entries.
attestor.verify: Invoke attestation verification APIs.
attestor.read: Fetch attestation entries and proofs.
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
advisory-ai:view: View Advisory AI artefacts and cached outputs.
advisory-ai:operate: Submit Advisory AI inference and remediation requests.
advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
airgap:seal: Seal or unseal an air-gapped installation.
airgap:import: Import offline bundles and mirror artifacts while air-gapped.
airgap:status:read: Read air-gap sealing status and staleness indicators.
obs:read: Read observability dashboards, SLO digests, and incident overlays.
timeline:read: Read incident timeline entries and annotations.
timeline:write: Append deterministic incident timeline events and annotations.
evidence:create: Create evidence items, upload artefacts, and link attestations.
evidence:read: Read evidence items, artefacts, and linkage metadata.
evidence:hold: Apply or release legal holds on evidence items.
attest:read: Read attestation records, DSSE bundles, and verification proofs.
obs:incident: Toggle incident mode, extend retention, enable emergency telemetry.
authority.audit.read: Read Authority audit logs.
authority.clients.manage: Manage Authority client registrations.
authority.users.manage: Manage Authority users.
authority:tenants.read: Read the Authority tenant catalog.
concelier.jobs.trigger: Trigger Concelier aggregation jobs.
concelier.merge: Manage Concelier merge operations.
effective:write: Write effective findings (Policy Engine service identity only).
email: Access email claim data.
@@ -72,17 +89,34 @@ components:
vex:ingest: Submit VEX ingestion payloads.
vex:read: Read VEX ingestion data.
vuln:read: Read vulnerability permalinks and overlays.
authorizationCode:
authorizationUrl: /authorize
tokenUrl: /token
refreshUrl: /token
scopes:
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
authority.audit.read: Read Authority audit logs.
authority.clients.manage: Manage Authority client registrations.
authority.users.manage: Manage Authority users.
authorizationCode:
authorizationUrl: /authorize
tokenUrl: /token
refreshUrl: /token
scopes:
attestor.write: Submit attestation bundles and Rekor entries.
attestor.verify: Invoke attestation verification APIs.
attestor.read: Fetch attestation entries and proofs.
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
advisory-ai:view: View Advisory AI artefacts and cached outputs.
advisory-ai:operate: Submit Advisory AI inference and remediation requests.
advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
airgap:seal: Seal or unseal an air-gapped installation.
airgap:import: Import offline bundles and mirror artifacts while air-gapped.
airgap:status:read: Read air-gap sealing status and staleness indicators.
obs:read: Read observability dashboards, SLO digests, and incident overlays.
timeline:read: Read incident timeline entries and annotations.
timeline:write: Append deterministic incident timeline events and annotations.
evidence:create: Create evidence items, upload artefacts, and link attestations.
evidence:read: Read evidence items, artefacts, and linkage metadata.
evidence:hold: Apply or release legal holds on evidence items.
attest:read: Read attestation records, DSSE bundles, and verification proofs.
obs:incident: Toggle incident mode, extend retention, enable emergency telemetry.
authority.audit.read: Read Authority audit logs.
authority.clients.manage: Manage Authority client registrations.
authority.users.manage: Manage Authority users.
authority:tenants.read: Read the Authority tenant catalog.
concelier.jobs.trigger: Trigger Concelier aggregation jobs.
concelier.merge: Manage Concelier merge operations.
@@ -125,11 +159,25 @@ components:
flows:
clientCredentials:
tokenUrl: /token
scopes:
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
authority.audit.read: Read Authority audit logs.
scopes:
attestor.write: Submit attestation bundles and Rekor entries.
attestor.verify: Invoke attestation verification APIs.
attestor.read: Fetch attestation entries and proofs.
advisory:ingest: Submit advisory ingestion payloads.
advisory:read: Read advisory ingestion data.
advisory-ai:view: View Advisory AI artefacts and cached outputs.
advisory-ai:operate: Submit Advisory AI inference and remediation requests.
advisory-ai:admin: Administer Advisory AI configuration, profiles, and remote execution.
aoc:verify: Execute Aggregation-Only Contract verification workflows.
obs:read: Read observability dashboards, SLO digests, and incident overlays.
timeline:read: Read incident timeline entries and annotations.
timeline:write: Append deterministic incident timeline events and annotations.
evidence:create: Create evidence items, upload artefacts, and link attestations.
evidence:read: Read evidence items, artefacts, and linkage metadata.
evidence:hold: Apply or release legal holds on evidence items.
attest:read: Read attestation records, DSSE bundles, and verification proofs.
obs:incident: Toggle incident mode, extend retention, enable emergency telemetry.
authority.audit.read: Read Authority audit logs.
authority.clients.manage: Manage Authority client registrations.
authority.users.manage: Manage Authority users.
authority:tenants.read: Read the Authority tenant catalog.
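
For context, a hedged sketch of how a confidential client might obtain a token carrying the new Advisory AI scopes through the client-credentials flow defined above. The authority base address, client id, and secret are placeholders; only the token path and scope names come from the spec.

using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;

public static class AdvisoryAiTokenExample
{
    public static async Task<string> RequestTokenAsync()
    {
        // Placeholder authority host; the /token path matches the clientCredentials flow above.
        using var http = new HttpClient { BaseAddress = new Uri("https://authority.example.internal") };

        var form = new FormUrlEncodedContent(new Dictionary<string, string>
        {
            ["grant_type"] = "client_credentials",
            ["client_id"] = "advisory-ai-worker",   // placeholder client registration
            ["client_secret"] = "<secret>",          // placeholder secret
            ["scope"] = "advisory-ai:operate advisory:read"
        });

        using var response = await http.PostAsync("/token", form);
        response.EnsureSuccessStatusCode();
        return await response.Content.ReadAsStringAsync(); // raw token response JSON
    }
}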

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Attestor.Envelope;
public enum DsseCompressionAlgorithm
{
None = 0,
Gzip = 1,
Brotli = 2
}

View File

@@ -0,0 +1,32 @@
using System;
namespace StellaOps.Attestor.Envelope;
public sealed record DsseDetachedPayloadReference
{
public DsseDetachedPayloadReference(string uri, string sha256, long? length = null, string? mediaType = null)
{
if (string.IsNullOrWhiteSpace(uri))
{
throw new ArgumentException("Detached payload URI must be provided.", nameof(uri));
}
if (string.IsNullOrWhiteSpace(sha256))
{
throw new ArgumentException("Detached payload digest must be provided.", nameof(sha256));
}
Uri = uri;
Sha256 = sha256.ToLowerInvariant();
Length = length;
MediaType = mediaType;
}
public string Uri { get; }
public string Sha256 { get; }
public long? Length { get; }
public string? MediaType { get; }
}

View File

@@ -0,0 +1,48 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Attestor.Envelope;
public sealed class DsseEnvelope
{
public DsseEnvelope(
string payloadType,
ReadOnlyMemory<byte> payload,
IEnumerable<DsseSignature> signatures,
string? payloadContentType = null,
DsseDetachedPayloadReference? detachedPayload = null)
{
if (string.IsNullOrWhiteSpace(payloadType))
{
throw new ArgumentException("payloadType must be provided.", nameof(payloadType));
}
PayloadType = payloadType;
Payload = payload;
PayloadContentType = payloadContentType;
DetachedPayload = detachedPayload;
var normalised = signatures?.ToArray() ?? Array.Empty<DsseSignature>();
if (normalised.Length == 0)
{
throw new ArgumentException("At least one signature must be supplied.", nameof(signatures));
}
// Deterministic ordering (keyid asc, signature asc) for canonical output.
Signatures = normalised
.OrderBy(static x => x.KeyId ?? string.Empty, StringComparer.Ordinal)
.ThenBy(static x => x.Signature, StringComparer.Ordinal)
.ToArray();
}
public string PayloadType { get; }
public ReadOnlyMemory<byte> Payload { get; }
public string? PayloadContentType { get; }
public IReadOnlyList<DsseSignature> Signatures { get; }
public DsseDetachedPayloadReference? DetachedPayload { get; }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Attestor.Envelope;
public sealed class DsseEnvelopeSerializationOptions
{
public bool EmitCompactJson { get; init; } = true;
public bool EmitExpandedJson { get; init; } = true;
public bool IndentExpandedJson { get; init; } = true;
public bool IncludePayloadPreview { get; init; } = true;
public DsseCompressionAlgorithm CompressionAlgorithm { get; init; } = DsseCompressionAlgorithm.None;
}

View File

@@ -0,0 +1,38 @@
using System;
namespace StellaOps.Attestor.Envelope;
public sealed class DsseEnvelopeSerializationResult
{
public DsseEnvelopeSerializationResult(
byte[]? compactJson,
byte[]? expandedJson,
string payloadSha256,
int originalPayloadLength,
int embeddedPayloadLength,
DsseCompressionAlgorithm compression,
DsseDetachedPayloadReference? detachedPayload)
{
CompactJson = compactJson;
ExpandedJson = expandedJson;
PayloadSha256 = payloadSha256 ?? throw new ArgumentNullException(nameof(payloadSha256));
OriginalPayloadLength = originalPayloadLength;
EmbeddedPayloadLength = embeddedPayloadLength;
Compression = compression;
DetachedPayload = detachedPayload;
}
public byte[]? CompactJson { get; }
public byte[]? ExpandedJson { get; }
public string PayloadSha256 { get; }
public int OriginalPayloadLength { get; }
public int EmbeddedPayloadLength { get; }
public DsseCompressionAlgorithm Compression { get; }
public DsseDetachedPayloadReference? DetachedPayload { get; }
}

View File

@@ -0,0 +1,331 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.IO;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
namespace StellaOps.Attestor.Envelope;
public static class DsseEnvelopeSerializer
{
public static DsseEnvelopeSerializationResult Serialize(DsseEnvelope envelope, DsseEnvelopeSerializationOptions? options = null)
{
ArgumentNullException.ThrowIfNull(envelope);
options ??= new DsseEnvelopeSerializationOptions();
var originalPayload = envelope.Payload.ToArray();
var processedPayload = ApplyCompression(originalPayload, options.CompressionAlgorithm);
var payloadSha256 = Convert.ToHexString(SHA256.HashData(originalPayload)).ToLowerInvariant();
var payloadBase64 = Convert.ToBase64String(processedPayload);
byte[]? compactJson = null;
if (options.EmitCompactJson)
{
compactJson = BuildCompactJson(envelope.PayloadType, payloadBase64, envelope.Signatures);
}
byte[]? expandedJson = null;
if (options.EmitExpandedJson)
{
expandedJson = BuildExpandedJson(
envelope,
payloadBase64,
payloadSha256,
originalPayload.Length,
processedPayload.Length,
options,
originalPayload);
}
return new DsseEnvelopeSerializationResult(
compactJson,
expandedJson,
payloadSha256,
originalPayload.Length,
processedPayload.Length,
options.CompressionAlgorithm,
envelope.DetachedPayload);
}
private static byte[] BuildCompactJson(string payloadType, string payloadBase64, IReadOnlyList<DsseSignature> signatures)
{
var buffer = new ArrayBufferWriter<byte>();
using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions
{
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
Indented = false
});
writer.WriteStartObject();
writer.WriteString("payloadType", payloadType);
writer.WriteString("payload", payloadBase64);
writer.WritePropertyName("signatures");
writer.WriteStartArray();
foreach (var signature in EnumerateCanonicalSignatures(signatures))
{
writer.WriteStartObject();
if (!string.IsNullOrWhiteSpace(signature.KeyId))
{
writer.WriteString("keyid", signature.KeyId);
}
writer.WriteString("sig", signature.Signature);
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WriteEndObject();
writer.Flush();
return buffer.WrittenSpan.ToArray();
}
private static byte[]? BuildExpandedJson(
DsseEnvelope envelope,
string payloadBase64,
string payloadSha256,
int originalPayloadLength,
int embeddedPayloadLength,
DsseEnvelopeSerializationOptions options,
byte[] originalPayload)
{
var buffer = new ArrayBufferWriter<byte>();
using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions
{
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
Indented = options.IndentExpandedJson
});
writer.WriteStartObject();
writer.WriteString("payloadType", envelope.PayloadType);
writer.WriteString("payload", payloadBase64);
writer.WritePropertyName("signatures");
writer.WriteStartArray();
foreach (var signature in EnumerateCanonicalSignatures(envelope.Signatures))
{
writer.WriteStartObject();
if (!string.IsNullOrWhiteSpace(signature.KeyId))
{
writer.WriteString("keyid", signature.KeyId);
}
writer.WriteString("sig", signature.Signature);
writer.WriteEndObject();
}
writer.WriteEndArray();
writer.WritePropertyName("payloadInfo");
writer.WriteStartObject();
writer.WriteString("sha256", payloadSha256);
writer.WriteNumber("length", originalPayloadLength);
if (options.CompressionAlgorithm != DsseCompressionAlgorithm.None)
{
writer.WritePropertyName("compression");
writer.WriteStartObject();
writer.WriteString("algorithm", GetCompressionName(options.CompressionAlgorithm));
writer.WriteNumber("compressedLength", embeddedPayloadLength);
writer.WriteEndObject();
}
writer.WriteEndObject(); // payloadInfo
if (options.IncludePayloadPreview)
{
// TryWritePayloadPreview emits the payloadPreview object only for JSON or text content types.
TryWritePayloadPreview(envelope.PayloadContentType, originalPayload, writer);
}
if (envelope.DetachedPayload is not null)
{
writer.WritePropertyName("detachedPayload");
writer.WriteStartObject();
writer.WriteString("uri", envelope.DetachedPayload.Uri);
writer.WriteString("sha256", envelope.DetachedPayload.Sha256);
if (envelope.DetachedPayload.Length.HasValue)
{
writer.WriteNumber("length", envelope.DetachedPayload.Length.Value);
}
if (!string.IsNullOrWhiteSpace(envelope.DetachedPayload.MediaType))
{
writer.WriteString("mediaType", envelope.DetachedPayload.MediaType);
}
writer.WriteEndObject();
}
writer.WriteEndObject();
writer.Flush();
return buffer.WrittenSpan.ToArray();
}
private static bool TryWritePayloadPreview(string? contentType, byte[] originalPayload, Utf8JsonWriter writer)
{
if (string.IsNullOrWhiteSpace(contentType))
{
return false;
}
var lower = contentType.ToLowerInvariant();
if (!lower.Contains("json") && !lower.StartsWith("text/", StringComparison.Ordinal))
{
return false;
}
writer.WritePropertyName("payloadPreview");
writer.WriteStartObject();
writer.WriteString("mediaType", contentType);
if (lower.Contains("json") && TryParseJson(originalPayload, out var jsonDocument))
{
writer.WritePropertyName("json");
jsonDocument.WriteTo(writer);
jsonDocument.Dispose();
}
else if (TryDecodeUtf8(originalPayload, out var text))
{
writer.WriteString("text", text);
}
writer.WriteEndObject();
return true;
}
private static bool TryParseJson(byte[] payload, out JsonDocument document)
{
try
{
document = JsonDocument.Parse(payload);
return true;
}
catch (JsonException)
{
document = null!;
return false;
}
}
private static bool TryDecodeUtf8(byte[] payload, out string text)
{
var utf8 = new UTF8Encoding(false, true);
try
{
text = utf8.GetString(payload);
return true;
}
catch (DecoderFallbackException)
{
text = string.Empty;
return false;
}
}
private static byte[] ApplyCompression(byte[] payload, DsseCompressionAlgorithm algorithm)
{
return algorithm switch
{
DsseCompressionAlgorithm.None => payload,
DsseCompressionAlgorithm.Gzip => CompressWithStream(payload, static (stream) => new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)),
DsseCompressionAlgorithm.Brotli => CompressWithStream(payload, static (stream) => new BrotliStream(stream, CompressionLevel.SmallestSize, leaveOpen: true)),
_ => throw new NotSupportedException($"Compression algorithm '{algorithm}' is not supported.")
};
}
private static byte[] CompressWithStream(byte[] payload, Func<Stream, Stream> streamFactory)
{
if (payload.Length == 0)
{
return Array.Empty<byte>();
}
using var output = new MemoryStream();
using (var compressionStream = streamFactory(output))
{
compressionStream.Write(payload);
}
return output.ToArray();
}
private static string GetCompressionName(DsseCompressionAlgorithm algorithm)
{
return algorithm switch
{
DsseCompressionAlgorithm.Gzip => "gzip",
DsseCompressionAlgorithm.Brotli => "brotli",
DsseCompressionAlgorithm.None => "none",
_ => algorithm.ToString().ToLowerInvariant()
};
}
private static IEnumerable<DsseSignature> EnumerateCanonicalSignatures(IReadOnlyList<DsseSignature> signatures)
{
if (signatures.Count <= 1)
{
return signatures;
}
var comparer = CanonicalSignatureComparer.Instance;
var previous = signatures[0];
for (var i = 1; i < signatures.Count; i++)
{
var current = signatures[i];
if (comparer.Compare(previous, current) > 0)
{
var buffer = new List<DsseSignature>(signatures.Count);
for (var j = 0; j < signatures.Count; j++)
{
buffer.Add(signatures[j]);
}
buffer.Sort(comparer);
return buffer;
}
previous = current;
}
return signatures;
}
private sealed class CanonicalSignatureComparer : IComparer<DsseSignature>
{
public static CanonicalSignatureComparer Instance { get; } = new();
public int Compare(DsseSignature? x, DsseSignature? y)
{
if (ReferenceEquals(x, y))
{
return 0;
}
ArgumentNullException.ThrowIfNull(x);
ArgumentNullException.ThrowIfNull(y);
var keyComparison = string.Compare(x.KeyId, y.KeyId, StringComparison.Ordinal);
if (keyComparison != 0)
{
if (x.KeyId is null)
{
return -1;
}
if (y.KeyId is null)
{
return 1;
}
return keyComparison;
}
return string.Compare(x.Signature, y.Signature, StringComparison.Ordinal);
}
}
}
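
A minimal usage sketch for the serializer above; the payload bytes and the base64 signature value are placeholders, while the types and call shapes are the ones introduced in this commit.

using System.Text;
using StellaOps.Attestor.Envelope;

var payload = Encoding.UTF8.GetBytes("{\"subject\":\"example\"}"); // placeholder payload
var envelope = new DsseEnvelope(
    "application/vnd.in-toto+json",
    payload,
    new[] { new DsseSignature("AQID", keyId: "sha256:example") }, // placeholder signature material
    payloadContentType: "application/json");

var result = DsseEnvelopeSerializer.Serialize(
    envelope,
    new DsseEnvelopeSerializationOptions { CompressionAlgorithm = DsseCompressionAlgorithm.Gzip });

// result.CompactJson is the canonical DSSE document; result.ExpandedJson adds payloadInfo metadata.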

View File

@@ -0,0 +1,31 @@
using System;
namespace StellaOps.Attestor.Envelope;
public sealed record DsseSignature
{
public DsseSignature(string signature, string? keyId = null)
{
if (string.IsNullOrWhiteSpace(signature))
{
throw new ArgumentException("Signature must be provided.", nameof(signature));
}
Signature = signature;
KeyId = keyId;
}
public string Signature { get; }
public string? KeyId { get; }
public static DsseSignature FromBytes(ReadOnlySpan<byte> signature, string? keyId = null)
{
if (signature.IsEmpty)
{
throw new ArgumentException("Signature bytes must be provided.", nameof(signature));
}
return new DsseSignature(Convert.ToBase64String(signature), keyId);
}
}

View File

@@ -0,0 +1,301 @@
using System;
using System.Security.Cryptography;
using StellaOps.Cryptography;
namespace StellaOps.Attestor.Envelope;
/// <summary>
/// Describes the underlying key algorithm for DSSE envelope signing.
/// </summary>
public enum EnvelopeKeyKind
{
Ed25519,
Ecdsa
}
/// <summary>
/// Represents signing or verification key material for DSSE envelope operations.
/// </summary>
public sealed class EnvelopeKey
{
private const int Ed25519PublicKeyLength = 32;
private const int Ed25519PrivateKeySeedLength = 32;
private const int Ed25519PrivateKeyExpandedLength = 64;
private readonly byte[]? ed25519PublicKey;
private readonly byte[]? ed25519PrivateKey;
private readonly ECParameters? ecdsaPublicParameters;
private readonly ECParameters? ecdsaPrivateParameters;
private EnvelopeKey(
EnvelopeKeyKind kind,
string algorithmId,
string keyId,
byte[]? ed25519PublicKey,
byte[]? ed25519PrivateKey,
ECParameters? ecdsaPublicParameters,
ECParameters? ecdsaPrivateParameters)
{
Kind = kind;
AlgorithmId = algorithmId;
KeyId = keyId;
this.ed25519PublicKey = ed25519PublicKey;
this.ed25519PrivateKey = ed25519PrivateKey;
this.ecdsaPublicParameters = ecdsaPublicParameters;
this.ecdsaPrivateParameters = ecdsaPrivateParameters;
}
/// <summary>
/// Gets the key classification.
/// </summary>
public EnvelopeKeyKind Kind { get; }
/// <summary>
/// Gets the signing algorithm identifier (e.g., ED25519, ES256).
/// </summary>
public string AlgorithmId { get; }
/// <summary>
/// Gets the deterministic key identifier (RFC7638 JWK thumbprint based).
/// </summary>
public string KeyId { get; }
/// <summary>
/// Indicates whether the key has private material available.
/// </summary>
public bool HasPrivateMaterial => Kind switch
{
EnvelopeKeyKind.Ed25519 => ed25519PrivateKey is not null,
EnvelopeKeyKind.Ecdsa => ecdsaPrivateParameters.HasValue,
_ => false
};
/// <summary>
/// Indicates whether the key has public material available.
/// </summary>
public bool HasPublicMaterial => Kind switch
{
EnvelopeKeyKind.Ed25519 => ed25519PublicKey is not null,
EnvelopeKeyKind.Ecdsa => ecdsaPublicParameters.HasValue,
_ => false
};
internal ReadOnlySpan<byte> GetEd25519PublicKey()
{
if (Kind != EnvelopeKeyKind.Ed25519 || ed25519PublicKey is null)
{
throw new InvalidOperationException("Key does not provide Ed25519 public material.");
}
return ed25519PublicKey;
}
internal ReadOnlySpan<byte> GetEd25519PrivateKey()
{
if (Kind != EnvelopeKeyKind.Ed25519 || ed25519PrivateKey is null)
{
throw new InvalidOperationException("Key does not provide Ed25519 private material.");
}
return ed25519PrivateKey;
}
internal ECParameters GetEcdsaPublicParameters()
{
if (Kind != EnvelopeKeyKind.Ecdsa || !ecdsaPublicParameters.HasValue)
{
throw new InvalidOperationException("Key does not provide ECDSA public parameters.");
}
return CloneParameters(ecdsaPublicParameters.Value, includePrivate: false);
}
internal ECParameters GetEcdsaPrivateParameters()
{
if (Kind != EnvelopeKeyKind.Ecdsa || !ecdsaPrivateParameters.HasValue)
{
throw new InvalidOperationException("Key does not provide ECDSA private parameters.");
}
return CloneParameters(ecdsaPrivateParameters.Value, includePrivate: true);
}
/// <summary>
/// Creates an Ed25519 signing key (requires private + public material).
/// </summary>
/// <param name="privateKey">64-byte Ed25519 private key (seed || public key).</param>
/// <param name="publicKey">32-byte Ed25519 public key.</param>
/// <param name="keyId">Optional external key identifier override.</param>
/// <returns>Envelope key instance.</returns>
public static EnvelopeKey CreateEd25519Signer(ReadOnlySpan<byte> privateKey, ReadOnlySpan<byte> publicKey, string? keyId = null)
{
var normalizedPrivate = NormalizeEd25519PrivateKey(privateKey);
ValidateEd25519PublicLength(publicKey);
var publicCopy = publicKey.ToArray();
var resolvedKeyId = string.IsNullOrWhiteSpace(keyId)
? EnvelopeKeyIdCalculator.FromEd25519(publicCopy)
: keyId;
return new EnvelopeKey(
EnvelopeKeyKind.Ed25519,
SignatureAlgorithms.Ed25519,
resolvedKeyId,
publicCopy,
normalizedPrivate,
ecdsaPublicParameters: null,
ecdsaPrivateParameters: null);
}
/// <summary>
/// Creates an Ed25519 verification key (public material only).
/// </summary>
/// <param name="publicKey">32-byte Ed25519 public key.</param>
/// <param name="keyId">Optional external key identifier override.</param>
/// <returns>Envelope key instance.</returns>
public static EnvelopeKey CreateEd25519Verifier(ReadOnlySpan<byte> publicKey, string? keyId = null)
{
ValidateEd25519PublicLength(publicKey);
var publicCopy = publicKey.ToArray();
var resolvedKeyId = string.IsNullOrWhiteSpace(keyId)
? EnvelopeKeyIdCalculator.FromEd25519(publicCopy)
: keyId;
return new EnvelopeKey(
EnvelopeKeyKind.Ed25519,
SignatureAlgorithms.Ed25519,
resolvedKeyId,
publicCopy,
ed25519PrivateKey: null,
ecdsaPublicParameters: null,
ecdsaPrivateParameters: null);
}
/// <summary>
/// Creates an ECDSA signing key (private + public EC parameters).
/// </summary>
/// <param name="algorithmId">ECDSA algorithm identifier (ES256, ES384, ES512).</param>
/// <param name="privateParameters">EC parameters including private scalar.</param>
/// <param name="keyId">Optional external key identifier override.</param>
/// <returns>Envelope key instance.</returns>
public static EnvelopeKey CreateEcdsaSigner(string algorithmId, in ECParameters privateParameters, string? keyId = null)
{
ValidateEcdsaAlgorithm(algorithmId);
if (privateParameters.D is null || privateParameters.D.Length == 0)
{
throw new ArgumentException("ECDSA private parameters must include the scalar component (D).", nameof(privateParameters));
}
if (privateParameters.Q.X is null || privateParameters.Q.Y is null)
{
throw new ArgumentException("ECDSA private parameters must include public coordinates.", nameof(privateParameters));
}
var publicClone = CloneParameters(privateParameters, includePrivate: false);
var privateClone = CloneParameters(privateParameters, includePrivate: true);
var resolvedKeyId = string.IsNullOrWhiteSpace(keyId)
? EnvelopeKeyIdCalculator.FromEcdsa(algorithmId, publicClone)
: keyId;
return new EnvelopeKey(
EnvelopeKeyKind.Ecdsa,
algorithmId,
resolvedKeyId,
ed25519PublicKey: null,
ed25519PrivateKey: null,
ecdsaPublicParameters: publicClone,
ecdsaPrivateParameters: privateClone);
}
/// <summary>
/// Creates an ECDSA verification key (public EC parameters).
/// </summary>
/// <param name="algorithmId">ECDSA algorithm identifier (ES256, ES384, ES512).</param>
/// <param name="publicParameters">EC parameters containing only public coordinates.</param>
/// <param name="keyId">Optional external key identifier override.</param>
/// <returns>Envelope key instance.</returns>
public static EnvelopeKey CreateEcdsaVerifier(string algorithmId, in ECParameters publicParameters, string? keyId = null)
{
ValidateEcdsaAlgorithm(algorithmId);
if (publicParameters.Q.X is null || publicParameters.Q.Y is null)
{
throw new ArgumentException("ECDSA public parameters must include X and Y coordinates.", nameof(publicParameters));
}
if (publicParameters.D is not null)
{
throw new ArgumentException("ECDSA verification parameters must not include private scalar data.", nameof(publicParameters));
}
var publicClone = CloneParameters(publicParameters, includePrivate: false);
var resolvedKeyId = string.IsNullOrWhiteSpace(keyId)
? EnvelopeKeyIdCalculator.FromEcdsa(algorithmId, publicClone)
: keyId;
return new EnvelopeKey(
EnvelopeKeyKind.Ecdsa,
algorithmId,
resolvedKeyId,
ed25519PublicKey: null,
ed25519PrivateKey: null,
ecdsaPublicParameters: publicClone,
ecdsaPrivateParameters: null);
}
private static byte[] NormalizeEd25519PrivateKey(ReadOnlySpan<byte> privateKey)
{
return privateKey.Length switch
{
Ed25519PrivateKeySeedLength => privateKey.ToArray(),
Ed25519PrivateKeyExpandedLength => privateKey[..Ed25519PrivateKeySeedLength].ToArray(),
_ => throw new ArgumentException($"Ed25519 private key must be {Ed25519PrivateKeySeedLength} or {Ed25519PrivateKeyExpandedLength} bytes.", nameof(privateKey))
};
}
private static void ValidateEd25519PublicLength(ReadOnlySpan<byte> publicKey)
{
if (publicKey.Length != Ed25519PublicKeyLength)
{
throw new ArgumentException($"Ed25519 public key must be {Ed25519PublicKeyLength} bytes.", nameof(publicKey));
}
}
private static void ValidateEcdsaAlgorithm(string algorithmId)
{
if (string.IsNullOrWhiteSpace(algorithmId))
{
throw new ArgumentException("Algorithm identifier is required.", nameof(algorithmId));
}
var supported = string.Equals(algorithmId, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase)
|| string.Equals(algorithmId, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase)
|| string.Equals(algorithmId, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase);
if (!supported)
{
throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId));
}
}
private static ECParameters CloneParameters(ECParameters source, bool includePrivate)
{
var clone = new ECParameters
{
Curve = source.Curve,
Q = new ECPoint
{
X = source.Q.X is null ? null : (byte[])source.Q.X.Clone(),
Y = source.Q.Y is null ? null : (byte[])source.Q.Y.Clone()
}
};
if (includePrivate && source.D is not null)
{
clone.D = (byte[])source.D.Clone();
}
return clone;
}
}

View File

@@ -0,0 +1,54 @@
using System;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Attestor.Envelope;
internal static class EnvelopeKeyIdCalculator
{
public static string FromEd25519(ReadOnlySpan<byte> publicKey)
{
if (publicKey.Length != 32)
{
throw new ArgumentException("Ed25519 public key must be 32 bytes.", nameof(publicKey));
}
var jwk = $"{{\"crv\":\"Ed25519\",\"kty\":\"OKP\",\"x\":\"{ToBase64Url(publicKey)}\"}}";
return $"sha256:{ComputeSha256Base64Url(jwk)}";
}
public static string FromEcdsa(string algorithmId, in ECParameters parameters)
{
var curve = ResolveCurveName(algorithmId);
var x = parameters.Q.X ?? throw new ArgumentException("ECDSA public parameters missing X coordinate.", nameof(parameters));
var y = parameters.Q.Y ?? throw new ArgumentException("ECDSA public parameters missing Y coordinate.", nameof(parameters));
var jwk = $"{{\"crv\":\"{curve}\",\"kty\":\"EC\",\"x\":\"{ToBase64Url(x)}\",\"y\":\"{ToBase64Url(y)}\"}}";
return $"sha256:{ComputeSha256Base64Url(jwk)}";
}
private static string ResolveCurveName(string algorithmId) => algorithmId?.ToUpperInvariant() switch
{
"ES256" => "P-256",
"ES384" => "P-384",
"ES512" => "P-521",
_ => throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId))
};
private static string ComputeSha256Base64Url(string value)
{
using var sha = SHA256.Create();
var bytes = Encoding.UTF8.GetBytes(value);
var digest = sha.ComputeHash(bytes);
return ToBase64Url(digest);
}
private static string ToBase64Url(ReadOnlySpan<byte> value)
{
var base64 = Convert.ToBase64String(value);
return base64
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
}
}

View File

@@ -0,0 +1,48 @@
using System;
namespace StellaOps.Attestor.Envelope;
/// <summary>
/// Represents a DSSE envelope signature (detached from payload).
/// </summary>
public sealed class EnvelopeSignature
{
private readonly byte[] signature;
public EnvelopeSignature(string keyId, string algorithmId, ReadOnlySpan<byte> value)
{
if (string.IsNullOrWhiteSpace(keyId))
{
throw new ArgumentException("Key identifier is required.", nameof(keyId));
}
if (string.IsNullOrWhiteSpace(algorithmId))
{
throw new ArgumentException("Algorithm identifier is required.", nameof(algorithmId));
}
if (value.Length == 0)
{
throw new ArgumentException("Signature bytes must not be empty.", nameof(value));
}
KeyId = keyId;
AlgorithmId = algorithmId;
signature = value.ToArray();
}
/// <summary>
/// Gets the key identifier associated with the signature.
/// </summary>
public string KeyId { get; }
/// <summary>
/// Gets the signing algorithm identifier.
/// </summary>
public string AlgorithmId { get; }
/// <summary>
/// Gets the raw signature bytes.
/// </summary>
public ReadOnlyMemory<byte> Value => signature;
}

View File

@@ -0,0 +1,56 @@
using System;
namespace StellaOps.Attestor.Envelope;
/// <summary>
/// Error codes returned by envelope signing and verification helpers.
/// </summary>
public enum EnvelopeSignatureErrorCode
{
UnsupportedAlgorithm,
InvalidKeyMaterial,
MissingPrivateKey,
MissingPublicKey,
AlgorithmMismatch,
KeyIdMismatch,
InvalidSignatureFormat,
SignatureInvalid,
SigningFailed,
VerificationFailed
}
/// <summary>
/// Represents a deterministic error emitted by signature helpers.
/// </summary>
public sealed record EnvelopeSignatureError(EnvelopeSignatureErrorCode Code, string Message, Exception? Exception = null);
/// <summary>
/// Generic result wrapper providing success state and structured errors.
/// </summary>
public sealed class EnvelopeResult<T>
{
private EnvelopeResult(bool isSuccess, T? value, EnvelopeSignatureError? error)
{
IsSuccess = isSuccess;
this.value = value;
this.error = error;
}
public bool IsSuccess { get; }
public T Value => IsSuccess
? value ?? throw new InvalidOperationException("Successful result is missing value.")
: throw new InvalidOperationException("Cannot access Value when result indicates failure.");
public EnvelopeSignatureError Error => !IsSuccess
? error ?? throw new InvalidOperationException("Failed result is missing error information.")
: throw new InvalidOperationException("Cannot access Error when result indicates success.");
private readonly T? value;
private readonly EnvelopeSignatureError? error;
public static EnvelopeResult<T> Success(T value) => new(true, value, null);
public static EnvelopeResult<T> Failure(EnvelopeSignatureError error) => new(false, default, error);
}

View File

@@ -0,0 +1,164 @@
using System;
using System.Security.Cryptography;
using System.Threading;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Crypto.Signers;
namespace StellaOps.Attestor.Envelope;
/// <summary>
/// Provides Ed25519 and ECDSA helpers for creating and verifying DSSE envelope signatures.
/// </summary>
public sealed class EnvelopeSignatureService
{
private const int Ed25519SignatureLength = 64;
public EnvelopeResult<EnvelopeSignature> Sign(ReadOnlySpan<byte> payload, EnvelopeKey key, CancellationToken cancellationToken = default)
{
if (key is null)
{
throw new ArgumentNullException(nameof(key));
}
cancellationToken.ThrowIfCancellationRequested();
return key.Kind switch
{
EnvelopeKeyKind.Ed25519 => SignEd25519(payload, key),
EnvelopeKeyKind.Ecdsa => SignEcdsa(payload, key),
_ => EnvelopeResult<EnvelopeSignature>.Failure(Error(EnvelopeSignatureErrorCode.UnsupportedAlgorithm, $"Unsupported key kind '{key.Kind}'."))
};
}
public EnvelopeResult<bool> Verify(ReadOnlySpan<byte> payload, EnvelopeSignature signature, EnvelopeKey key, CancellationToken cancellationToken = default)
{
if (signature is null)
{
throw new ArgumentNullException(nameof(signature));
}
if (key is null)
{
throw new ArgumentNullException(nameof(key));
}
cancellationToken.ThrowIfCancellationRequested();
if (!key.HasPublicMaterial)
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.MissingPublicKey, "Verification requires public key material."));
}
if (!string.Equals(signature.KeyId, key.KeyId, StringComparison.Ordinal))
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.KeyIdMismatch, "Signature key identifier does not match the supplied key."));
}
if (!string.Equals(signature.AlgorithmId, key.AlgorithmId, StringComparison.OrdinalIgnoreCase))
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.AlgorithmMismatch, "Signature algorithm does not match the supplied key."));
}
return key.Kind switch
{
EnvelopeKeyKind.Ed25519 => VerifyEd25519(payload, signature, key),
EnvelopeKeyKind.Ecdsa => VerifyEcdsa(payload, signature, key),
_ => EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.UnsupportedAlgorithm, $"Unsupported key kind '{key.Kind}'."))
};
}
private static EnvelopeResult<EnvelopeSignature> SignEd25519(ReadOnlySpan<byte> payload, EnvelopeKey key)
{
if (!key.HasPrivateMaterial)
{
return EnvelopeResult<EnvelopeSignature>.Failure(Error(EnvelopeSignatureErrorCode.MissingPrivateKey, "Signing requires Ed25519 private material."));
}
try
{
var payloadBytes = payload.ToArray();
var privateKey = new Ed25519PrivateKeyParameters(key.GetEd25519PrivateKey().ToArray(), 0);
var signer = new Ed25519Signer();
signer.Init(true, privateKey);
signer.BlockUpdate(payloadBytes, 0, payloadBytes.Length);
var signatureBytes = signer.GenerateSignature();
return EnvelopeResult<EnvelopeSignature>.Success(new EnvelopeSignature(key.KeyId, key.AlgorithmId, signatureBytes));
}
catch (Exception ex) when (ex is ArgumentException or CryptographicException or InvalidOperationException)
{
return EnvelopeResult<EnvelopeSignature>.Failure(Error(EnvelopeSignatureErrorCode.SigningFailed, "Failed to produce Ed25519 signature.", ex));
}
}
private static EnvelopeResult<EnvelopeSignature> SignEcdsa(ReadOnlySpan<byte> payload, EnvelopeKey key)
{
if (!key.HasPrivateMaterial)
{
return EnvelopeResult<EnvelopeSignature>.Failure(Error(EnvelopeSignatureErrorCode.MissingPrivateKey, "Signing requires ECDSA private material."));
}
try
{
using var ecdsa = ECDsa.Create(key.GetEcdsaPrivateParameters());
var signatureBytes = ecdsa.SignData(payload, ResolveHashAlgorithm(key.AlgorithmId));
return EnvelopeResult<EnvelopeSignature>.Success(new EnvelopeSignature(key.KeyId, key.AlgorithmId, signatureBytes));
}
catch (Exception ex) when (ex is ArgumentException or CryptographicException or InvalidOperationException)
{
return EnvelopeResult<EnvelopeSignature>.Failure(Error(EnvelopeSignatureErrorCode.SigningFailed, "Failed to produce ECDSA signature.", ex));
}
}
private static EnvelopeResult<bool> VerifyEd25519(ReadOnlySpan<byte> payload, EnvelopeSignature signature, EnvelopeKey key)
{
var signatureBytes = signature.Value.Span;
if (signatureBytes.Length != Ed25519SignatureLength)
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.InvalidSignatureFormat, $"Ed25519 signatures must be {Ed25519SignatureLength} bytes."));
}
try
{
var payloadBytes = payload.ToArray();
var publicKey = new Ed25519PublicKeyParameters(key.GetEd25519PublicKey().ToArray(), 0);
var verifier = new Ed25519Signer();
verifier.Init(false, publicKey);
verifier.BlockUpdate(payloadBytes, 0, payloadBytes.Length);
var valid = verifier.VerifySignature(signatureBytes.ToArray());
return valid
? EnvelopeResult<bool>.Success(true)
: EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.SignatureInvalid, "Ed25519 signature verification failed."));
}
catch (Exception ex) when (ex is ArgumentException or CryptographicException)
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.VerificationFailed, "Failed to verify Ed25519 signature.", ex));
}
}
private static EnvelopeResult<bool> VerifyEcdsa(ReadOnlySpan<byte> payload, EnvelopeSignature signature, EnvelopeKey key)
{
try
{
using var ecdsa = ECDsa.Create(key.GetEcdsaPublicParameters());
var valid = ecdsa.VerifyData(payload, signature.Value.Span, ResolveHashAlgorithm(key.AlgorithmId));
return valid
? EnvelopeResult<bool>.Success(true)
: EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.SignatureInvalid, "ECDSA signature verification failed."));
}
catch (Exception ex) when (ex is ArgumentException or CryptographicException)
{
return EnvelopeResult<bool>.Failure(Error(EnvelopeSignatureErrorCode.VerificationFailed, "Failed to verify ECDSA signature.", ex));
}
}
private static HashAlgorithmName ResolveHashAlgorithm(string algorithmId) => algorithmId?.ToUpperInvariant() switch
{
"ES256" => HashAlgorithmName.SHA256,
"ES384" => HashAlgorithmName.SHA384,
"ES512" => HashAlgorithmName.SHA512,
_ => throw new ArgumentException($"Unsupported ECDSA algorithm '{algorithmId}'.", nameof(algorithmId))
};
private static EnvelopeSignatureError Error(EnvelopeSignatureErrorCode code, string message, Exception? exception = null)
=> new(code, message, exception);
}

View File

@@ -0,0 +1,57 @@
using System;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using FluentAssertions;
using Xunit;
using EnvelopeModel = StellaOps.Attestor.Envelope;
namespace StellaOps.Attestor.Envelope.Tests;
public sealed class DsseEnvelopeSerializerTests
{
private static readonly byte[] SamplePayload = Encoding.UTF8.GetBytes("deterministic-dsse-payload");
[Fact]
public void Serialize_ProducesDeterministicCompactJson_ForSignaturePermutations()
{
var signatures = new[]
{
EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("0A1B2C3D4E5F60718293A4B5C6D7E8F9"), "tenant-z"),
EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"), null),
EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("00112233445566778899AABBCCDDEEFF"), "tenant-a"),
EnvelopeModel.DsseSignature.FromBytes(Convert.FromHexString("1234567890ABCDEF1234567890ABCDEF"), "tenant-b")
};
var baselineEnvelope = new EnvelopeModel.DsseEnvelope("application/vnd.stellaops.test+json", SamplePayload, signatures);
var baseline = EnvelopeModel.DsseEnvelopeSerializer.Serialize(baselineEnvelope);
baseline.CompactJson.Should().NotBeNull();
var baselineJson = Encoding.UTF8.GetString(baseline.CompactJson!);
var rng = new Random(12345);
for (var iteration = 0; iteration < 32; iteration++)
{
var shuffled = signatures.OrderBy(_ => rng.Next()).ToArray();
var envelope = new EnvelopeModel.DsseEnvelope("application/vnd.stellaops.test+json", SamplePayload, shuffled);
var result = EnvelopeModel.DsseEnvelopeSerializer.Serialize(envelope);
result.CompactJson.Should().NotBeNull();
var json = Encoding.UTF8.GetString(result.CompactJson!);
json.Should().Be(baselineJson, "canonical JSON must be deterministic regardless of signature insertion order");
result.PayloadSha256.Should().Be(
Convert.ToHexString(SHA256.HashData(SamplePayload)).ToLowerInvariant(),
"payload hash must reflect the raw payload bytes");
using var document = JsonDocument.Parse(result.CompactJson!);
var keyIds = document.RootElement
.GetProperty("signatures")
.EnumerateArray()
.Select(element => element.TryGetProperty("keyid", out var key) ? key.GetString() : null)
.ToArray();
keyIds.Should().Equal(new string?[] { null, "tenant-a", "tenant-b", "tenant-z" },
"signatures must be ordered by key identifier (null first) for canonical output");
}
}
}

View File

@@ -0,0 +1,149 @@
using System;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using StellaOps.Attestor.Envelope;
using StellaOps.Cryptography;
using Xunit;
namespace StellaOps.Attestor.Envelope.Tests;
public sealed class EnvelopeSignatureServiceTests
{
private static readonly byte[] SamplePayload = Encoding.UTF8.GetBytes("stella-ops-deterministic");
private static readonly byte[] Ed25519Seed =
Convert.FromHexString("9D61B19DEFFD5A60BA844AF492EC2CC4" +
"4449C5697B326919703BAC031CAE7F60D75A980182B10AB7D54BFED3C964073A" +
"0EE172F3DAA62325AF021A68F707511A");
private static readonly byte[] Ed25519Public =
Convert.FromHexString("D75A980182B10AB7D54BFED3C964073A0EE172F3DAA62325AF021A68F707511A");
private readonly EnvelopeSignatureService service = new();
[Fact]
public void SignAndVerify_Ed25519_Succeeds()
{
var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public);
var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public);
var signResult = service.Sign(SamplePayload, signingKey);
signResult.IsSuccess.Should().BeTrue();
signResult.Value.AlgorithmId.Should().Be(SignatureAlgorithms.Ed25519);
signResult.Value.KeyId.Should().Be(signingKey.KeyId);
var verifyResult = service.Verify(SamplePayload, signResult.Value, verifyKey);
verifyResult.IsSuccess.Should().BeTrue();
verifyResult.Value.Should().BeTrue();
var expectedKeyId = ComputeExpectedEd25519KeyId(Ed25519Public);
signingKey.KeyId.Should().Be(expectedKeyId);
}
[Fact]
public void Verify_Ed25519_InvalidSignature_ReturnsError()
{
var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public);
var signResult = service.Sign(SamplePayload, signingKey);
signResult.IsSuccess.Should().BeTrue();
var tamperedBytes = signResult.Value.Value.ToArray();
tamperedBytes[0] ^= 0xFF;
var tamperedSignature = new EnvelopeSignature(signResult.Value.KeyId, signResult.Value.AlgorithmId, tamperedBytes);
var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public);
var verifyResult = service.Verify(SamplePayload, tamperedSignature, verifyKey);
verifyResult.IsSuccess.Should().BeFalse();
verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.SignatureInvalid);
}
[Fact]
public void SignAndVerify_EcdsaEs256_Succeeds()
{
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var privateParameters = ecdsa.ExportParameters(includePrivateParameters: true);
var publicParameters = ecdsa.ExportParameters(includePrivateParameters: false);
var signingKey = EnvelopeKey.CreateEcdsaSigner(SignatureAlgorithms.Es256, in privateParameters);
var verifyKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es256, in publicParameters);
var signResult = service.Sign(SamplePayload, signingKey);
signResult.IsSuccess.Should().BeTrue();
var verifyResult = service.Verify(SamplePayload, signResult.Value, verifyKey);
verifyResult.IsSuccess.Should().BeTrue();
verifyResult.Value.Should().BeTrue();
}
[Fact]
public void Sign_WithVerificationOnlyKey_ReturnsMissingPrivateKey()
{
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var publicParameters = ecdsa.ExportParameters(includePrivateParameters: false);
var verifyOnlyKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es256, in publicParameters);
var signResult = service.Sign(SamplePayload, verifyOnlyKey);
signResult.IsSuccess.Should().BeFalse();
signResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.MissingPrivateKey);
}
[Fact]
public void Verify_WithMismatchedKeyId_ReturnsError()
{
var signingKey = EnvelopeKey.CreateEd25519Signer(Ed25519Seed, Ed25519Public);
var signResult = service.Sign(SamplePayload, signingKey);
signResult.IsSuccess.Should().BeTrue();
var alternateKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public, "sha256:alternate");
var verifyResult = service.Verify(SamplePayload, signResult.Value, alternateKey);
verifyResult.IsSuccess.Should().BeFalse();
verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.KeyIdMismatch);
}
[Fact]
public void Verify_WithInvalidSignatureLength_ReturnsFormatError()
{
var verifyKey = EnvelopeKey.CreateEd25519Verifier(Ed25519Public);
var invalidSignature = new EnvelopeSignature(verifyKey.KeyId, verifyKey.AlgorithmId, new byte[16]);
var verifyResult = service.Verify(SamplePayload, invalidSignature, verifyKey);
verifyResult.IsSuccess.Should().BeFalse();
verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.InvalidSignatureFormat);
}
[Fact]
public void Verify_WithAlgorithmMismatch_ReturnsError()
{
using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var privateParameters = ecdsa.ExportParameters(includePrivateParameters: true);
var publicParameters = ecdsa.ExportParameters(includePrivateParameters: false);
var signingKey = EnvelopeKey.CreateEcdsaSigner(SignatureAlgorithms.Es256, in privateParameters);
var signResult = service.Sign(SamplePayload, signingKey);
signResult.IsSuccess.Should().BeTrue();
var mismatchKey = EnvelopeKey.CreateEcdsaVerifier(SignatureAlgorithms.Es384, in publicParameters, signResult.Value.KeyId);
var verifyResult = service.Verify(SamplePayload, signResult.Value, mismatchKey);
verifyResult.IsSuccess.Should().BeFalse();
verifyResult.Error.Code.Should().Be(EnvelopeSignatureErrorCode.AlgorithmMismatch);
}
private static string ComputeExpectedEd25519KeyId(byte[] publicKey)
{
var jwk = $"{{\"crv\":\"Ed25519\",\"kty\":\"OKP\",\"x\":\"{ToBase64Url(publicKey)}\"}}";
using var sha = SHA256.Create();
var digest = sha.ComputeHash(Encoding.UTF8.GetBytes(jwk));
return $"sha256:{ToBase64Url(digest)}";
}
private static string ToBase64Url(byte[] bytes)
=> Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_');
}

View File

@@ -0,0 +1,22 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsNotAsErrors>NU1504</WarningsNotAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Attestor.Envelope.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" Version="2.5.1" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
</ItemGroup>
<ItemGroup>
<Compile Remove="__Tests\**\*.cs" />
<Compile Remove="StellaOps.Attestor.Envelope.Tests\**\*.cs" />
</ItemGroup>
</Project>

View File

@@ -1,13 +1,13 @@
# Attestation Envelope Task Board — Epic 19: Attestor Console
## Sprint 72 Foundations
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-ENVELOPE-72-001 | TODO | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. |
| ATTEST-ENVELOPE-72-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. |
## Sprint 73 Crypto Integration
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-ENVELOPE-73-001 | TODO | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. |
| ATTEST-ENVELOPE-73-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. |
# Attestation Envelope Task Board — Epic 19: Attestor Console
## Sprint 72 Foundations
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-ENVELOPE-72-001 | DONE (2025-11-01) | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. |
| ATTEST-ENVELOPE-72-002 | DONE | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. |
## Sprint 73 Crypto Integration
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-ENVELOPE-73-001 | DONE | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. |
| ATTEST-ENVELOPE-73-002 | DONE | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. |

View File

@@ -0,0 +1,139 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.Envelope;
using Xunit;
namespace StellaOps.Attestor.Envelope.Tests;
public sealed class DsseEnvelopeSerializerTests
{
[Fact]
public void Serialize_WithDefaultOptions_ProducesCompactAndExpandedJson()
{
var payload = Encoding.UTF8.GetBytes("{\"foo\":\"bar\"}");
var envelope = new DsseEnvelope(
"application/vnd.in-toto+json",
payload,
new[] { new DsseSignature("AQID") },
"application/json");
var result = DsseEnvelopeSerializer.Serialize(envelope);
Assert.NotNull(result.CompactJson);
Assert.NotNull(result.ExpandedJson);
var compact = Encoding.UTF8.GetString(result.CompactJson!);
Assert.Equal("{\"payloadType\":\"application/vnd.in-toto+json\",\"payload\":\"eyJmb28iOiJiYXIifQ==\",\"signatures\":[{\"sig\":\"AQID\"}]}", compact);
using var expanded = JsonDocument.Parse(result.ExpandedJson!);
var root = expanded.RootElement;
Assert.Equal("application/vnd.in-toto+json", root.GetProperty("payloadType").GetString());
Assert.Equal("eyJmb28iOiJiYXIifQ==", root.GetProperty("payload").GetString());
Assert.Equal("AQID", root.GetProperty("signatures")[0].GetProperty("sig").GetString());
var info = root.GetProperty("payloadInfo");
Assert.Equal(payload.Length, info.GetProperty("length").GetInt32());
Assert.Equal(result.PayloadSha256, info.GetProperty("sha256").GetString());
Assert.False(info.TryGetProperty("compression", out _));
var preview = root.GetProperty("payloadPreview");
Assert.Equal("application/json", preview.GetProperty("mediaType").GetString());
Assert.Equal("bar", preview.GetProperty("json").GetProperty("foo").GetString());
}
[Fact]
public void Serialize_WithCompressionEnabled_EmbedsCompressedPayloadMetadata()
{
var payload = Encoding.UTF8.GetBytes("{\"foo\":\"bar\",\"count\":1}");
var envelope = new DsseEnvelope(
"application/vnd.in-toto+json",
payload,
new[] { new DsseSignature("AQID") },
"application/json");
var options = new DsseEnvelopeSerializationOptions
{
CompressionAlgorithm = DsseCompressionAlgorithm.Gzip
};
var result = DsseEnvelopeSerializer.Serialize(envelope, options);
Assert.NotNull(result.CompactJson);
using var compactDoc = JsonDocument.Parse(result.CompactJson!);
var payloadBase64 = compactDoc.RootElement.GetProperty("payload").GetString();
Assert.False(string.IsNullOrEmpty(payloadBase64));
var compressedBytes = Convert.FromBase64String(payloadBase64!);
using var compressedStream = new MemoryStream(compressedBytes);
using var gzip = new GZipStream(compressedStream, CompressionMode.Decompress);
using var decompressed = new MemoryStream();
gzip.CopyTo(decompressed);
Assert.True(payload.SequenceEqual(decompressed.ToArray()));
using var expanded = JsonDocument.Parse(result.ExpandedJson!);
var info = expanded.RootElement.GetProperty("payloadInfo");
Assert.Equal(payload.Length, info.GetProperty("length").GetInt32());
var compression = info.GetProperty("compression");
Assert.Equal("gzip", compression.GetProperty("algorithm").GetString());
Assert.Equal(compressedBytes.Length, compression.GetProperty("compressedLength").GetInt32());
Assert.Equal(DsseCompressionAlgorithm.Gzip, result.Compression);
Assert.Equal(payload.Length, result.OriginalPayloadLength);
Assert.Equal(compressedBytes.Length, result.EmbeddedPayloadLength);
}
[Fact]
public void Serialize_WithDetachedReference_WritesMetadata()
{
var payload = Encoding.UTF8.GetBytes("detached payload preview");
var reference = new DsseDetachedPayloadReference(
"https://evidence.example.com/sbom.json",
"abc123",
payload.Length,
"application/json");
var envelope = new DsseEnvelope(
"application/vnd.in-toto+json",
payload,
new[] { new DsseSignature("AQID") },
"text/plain",
reference);
var result = DsseEnvelopeSerializer.Serialize(envelope);
Assert.NotNull(result.ExpandedJson);
using var expanded = JsonDocument.Parse(result.ExpandedJson!);
var detached = expanded.RootElement.GetProperty("detachedPayload");
Assert.Equal(reference.Uri, detached.GetProperty("uri").GetString());
Assert.Equal(reference.Sha256, detached.GetProperty("sha256").GetString());
Assert.Equal(reference.Length, detached.GetProperty("length").GetInt64());
Assert.Equal(reference.MediaType, detached.GetProperty("mediaType").GetString());
}
[Fact]
public void Serialize_CompactOnly_SkipsExpandedPayload()
{
var payload = Encoding.UTF8.GetBytes("payload");
var envelope = new DsseEnvelope(
"application/vnd.in-toto+json",
payload,
new[] { new DsseSignature("AQID") });
var options = new DsseEnvelopeSerializationOptions
{
EmitExpandedJson = false
};
var result = DsseEnvelopeSerializer.Serialize(envelope, options);
Assert.NotNull(result.CompactJson);
Assert.Null(result.ExpandedJson);
}
}

View File

@@ -0,0 +1,30 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<WarningsNotAsErrors>NU1504</WarningsNotAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="FsCheck.Xunit" Version="3.3.1" />
<PackageReference Include="FsCheck" Version="3.3.1" />
</ItemGroup>
<ItemGroup>
<Compile Remove="DsseEnvelopeFuzzTests.cs" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope.csproj" />
</ItemGroup>
</Project>

View File

@@ -23,3 +23,4 @@ Define strongly typed, versioned schemas for all attestation payloads and provid
- 3. Keep changes deterministic (stable ordering, timestamps, hashes) and align with offline/air-gap expectations.
- 4. Coordinate doc updates, tests, and cross-guild communication whenever contracts or workflows change.
- 5. Revert to `TODO` if you pause the task without shipping changes; leave notes in commit/PR descriptions for context.
- 6. When schemas or fixtures change, run `npm run docs:attestor:generate` followed by `npm run docs:attestor:validate` to refresh SDKs and guard parity.
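
As a hedged illustration of what the validate step is assumed to guard (strict schema-version checks and byte-stable canonical output — not the actual script implementation), the generated TypeScript helpers can be exercised directly; the payload below is illustrative and the relative import assumes the snippet sits beside the generated `index.ts`:

```ts
// Minimal parity spot-check (sketch only, not the docs:attestor:validate implementation).
import assert from 'node:assert/strict';
import { canonicalizeCustomEvidence, validateCustomEvidence } from './index.js';

const sample = {
  schemaVersion: 'StellaOps.CustomEvidence@1' as const,
  subjectDigest: `sha256:${'0'.repeat(64)}`,   // any 64-hex digest satisfies the format check
  kind: 'org.example/spot-check',              // illustrative kind, not a golden fixture
  generatedAt: '2025-10-31T00:00:00Z'
};

const accepted = validateCustomEvidence(sample);           // throws if the payload drifts from the schema
const first = canonicalizeCustomEvidence(accepted);
const second = canonicalizeCustomEvidence(structuredClone(accepted));
assert.equal(first, second);                               // canonical JSON must be byte-stable
```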

View File

@@ -3,11 +3,11 @@
## Sprint 72 Schema Definition
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-TYPES-72-001 | TODO | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. |
| ATTEST-TYPES-72-002 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. | Code generation integrated; lints pass; unit tests cover round-trips. |
| ATTEST-TYPES-72-001 | DONE | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. |
| ATTEST-TYPES-72-002 | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization (usage sketch below). | Code generation integrated; lints pass; unit tests cover round-trips. |
## Sprint 73 Fixtures & Docs
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-TYPES-73-001 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. |
| ATTEST-TYPES-73-002 | TODO | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. |
| ATTEST-TYPES-73-001 | DONE | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. |
| ATTEST-TYPES-73-002 | DONE | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/modules/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. |
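
A rough usage sketch of the generated models delivered under ATTEST-TYPES-72-002, mirroring the validate-then-canonicalize round-trip the exit criteria describe (TypeScript shown; identifiers such as `builder://example/ci` are placeholders and the import path assumes co-location with the generated `index.ts`):

```ts
import assert from 'node:assert/strict';
import {
  canonicalizeBuildProvenance,
  validateBuildProvenance,
  type BuildProvenance
} from './index.js';

// Smallest BuildProvenance the generated validator accepts: one material with one digest.
const provenance: BuildProvenance = {
  schemaVersion: 'StellaOps.BuildProvenance@1',
  buildType: 'docker/buildx',
  builder: { id: 'builder://example/ci' },
  materials: [
    {
      uri: 'git+https://example.invalid/repo@refs/heads/main',
      digests: [{ algorithm: 'sha256', value: 'a'.repeat(64) }]
    }
  ],
  metadata: {
    buildStartedOn: '2025-10-31T12:00:00Z',
    buildFinishedOn: '2025-10-31T12:05:00Z'
  }
};

// validate* accepts untyped input (e.g. parsed JSON) and returns the typed payload;
// canonicalize* emits deterministic JSON, so equal inputs yield identical output.
const parsed = validateBuildProvenance(JSON.parse(JSON.stringify(provenance)));
assert.equal(canonicalizeBuildProvenance(parsed), canonicalizeBuildProvenance(provenance));
```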

View File

@@ -0,0 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,107 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.BuildProvenance@1",
"subject": [
{
"subjectKind": "container-image",
"name": "registry.stella-ops.internal/scan/api",
"digest": {
"sha256": "5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00"
},
"imageDigest": "sha256:5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00",
"mediaType": "application/vnd.docker.distribution.manifest.v2+json"
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:builder",
"tenantId": "tenant-alpha",
"displayName": "StellaOps Build Service",
"workload": {
"service": "builder-web",
"cluster": "prod-us-east",
"namespace": "build-system"
},
"signingKey": {
"keyId": "builder-key-01",
"mode": "kms",
"algorithm": "ed25519",
"issuer": "vault.kms.internal"
}
},
"issuedAt": "2025-10-31T18:21:04Z",
"materials": [
{
"uri": "git+https://git.stella-ops.org/scanner.git@refs/heads/main",
"digest": {
"sha1": "a1b2c3d4e5f6a7b8c9d00112233445566778899a"
},
"role": "source"
},
{
"uri": "oci://registry.stella-ops.internal/base/node:20-bullseye",
"digest": {
"sha256": "ab40d8d0734c28f3b60df1e6a4ed3f2c1b5d7e9f0a1b2c3d4e5f66778899aabb"
},
"role": "base-image"
}
],
"transparency": [
{
"logId": "rekor-primary",
"logUrl": "https://rekor.stella-ops.internal",
"uuid": "cb2a6f2e-353e-4a62-8504-18f741fa0010",
"index": 128943,
"checkpoint": {
"origin": "rekor-primary",
"size": 155000,
"rootHash": "3rJcAM1b9x1Pcjwo8y9zKg2v1nX8/oe3mY4HhE2bY0g=",
"timestamp": "2025-10-31T18:21:06Z"
},
"witnessed": true
}
],
"build": {
"buildType": "stellaops:buildkit@v1",
"builder": {
"id": "urn:stellaops:builder:buildkit",
"version": "1.9.2",
"displayName": "BuildKit Runner"
},
"invocation": {
"configSource": {
"uri": "git+https://git.stella-ops.org/scanner.git//.stella/build.yaml",
"digest": {
"sha256": "1f7e26d668d9fd6bae1a5d0a7a27bf3cdf8b4dd0d9775ad911e6cef0e1edf1d2"
}
},
"parameters": {
"target": "release",
"platform": "linux/amd64"
},
"environment": {
"GIT_SHA": "9f3e7ad1",
"CI_PIPELINE_ID": "build-2045"
},
"entryPoint": "ci/scripts/build-image.sh"
},
"metadata": {
"startedAt": "2025-10-31T18:19:11Z",
"finishedAt": "2025-10-31T18:20:52Z",
"reproducible": true,
"buildDurationSeconds": 101
},
"outputs": [
{
"subjectKind": "artifact",
"name": "dist/scanner-api.tar",
"digest": {
"sha256": "cfe4b9b77b4a90d63ba6c2e5b40e6d9b9724f9a3e0d5b6c7f8e9d0a1b2c3d4e5"
},
"mediaType": "application/x-tar",
"sizeBytes": 31457280
}
]
},
"slsaLevel": "slsa3.0"
}

View File

@@ -0,0 +1,39 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.CustomEvidence@1",
"subject": [
{
"subjectKind": "artifact",
"name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba",
"digest": {
"sha256": "f3b4c5d6e7f8091a2b3c4d5e6f708192a3b4c5d6e7f8091a2b3c4d5e6f708192"
}
}
],
"issuer": {
"issuerType": "automation",
"id": "urn:stellaops:automation:evidence-uploader",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "automation-key-17",
"mode": "offline",
"algorithm": "ed25519"
}
},
"issuedAt": "2025-10-31T05:32:28Z",
"customSchema": {
"uri": "https://schemas.stella-ops.org/custom/runtime-evidence/v1.json",
"digest": {
"sha256": "aa11bb22cc33dd44ee55ff66aa77bb88cc99ddeeff0011223344556677889900"
},
"version": "1.0"
},
"payload": {
"controlId": "OPS-RUN-102",
"controlStatus": "passed",
"auditedBy": "auditor@example.org",
"evidenceUri": "s3://compliance-artifacts/runtime/api/2025-10-31/report.pdf",
"notes": "Manual security review completed for release 3.14.0."
},
"notes": "Custom evidence uploaded by compliance automation workflow."
}

View File

@@ -0,0 +1,77 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.PolicyEvaluation@1",
"subject": [
{
"subjectKind": "policy-report",
"name": "policy-eval/runtime-api@sha256:5f4d4b1e9c2f3a1d7a4e5b6c7d8e9f00112233445566778899aabbccddeeff00",
"digest": {
"sha256": "21f4b8d7c6e5a4f3b2c1d0e9f8a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9"
}
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:policy-engine",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "policy-engine-key",
"mode": "hsm",
"algorithm": "ed25519",
"issuer": "yubi-hsm"
}
},
"issuedAt": "2025-10-31T02:44:09Z",
"policy": {
"policyId": "runtime-enforce",
"policyVersion": "2025.10.1",
"revisionDigest": {
"sha256": "aa55bb66cc77dd88ee99ff00112233445566778899aabbccddeeff0011223344"
},
"mode": "enforce"
},
"result": {
"status": "fail",
"summary": "Policy runtime-enforce failed: 1 blocking rule violation.",
"violations": [
{
"ruleId": "RULE-RUNTIME-001",
"severity": "high",
"message": "Critical KEV vulnerabilities detected without waiver.",
"evidence": [
{
"type": "scan",
"id": "CVE-2025-10001"
}
],
"suggestedRemediation": "Apply patched base image or configure approved waiver."
}
],
"waiversApplied": [
"WAIVER-LICENSE-123"
]
},
"explain": [
{
"id": "trace-node-1",
"type": "rule",
"message": "Evaluated RULE-RUNTIME-001 on scan results"
},
{
"id": "trace-node-1.1",
"type": "binding",
"message": "Matched vulnerability CVE-2025-10001 with severity critical"
}
],
"metrics": {
"rulesEvaluated": 12,
"rulesPassed": 11,
"rulesFailed": 1,
"evaluationDurationMs": 84
},
"policyContext": {
"policyId": "runtime-enforce",
"policyVersion": "2025.10.1",
"mode": "enforce"
}
}

View File

@@ -0,0 +1,68 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.RiskProfileEvidence@1",
"subject": [
{
"subjectKind": "risk-profile",
"name": "runtime-api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba",
"digest": {
"sha256": "f3c2b1a0e9d8c7b6a5f4e3d2c1b0a9876543210fedcba9876543210fedcba987"
}
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:risk-engine",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "risk-engine-key",
"mode": "kms",
"algorithm": "ed25519"
}
},
"issuedAt": "2025-10-31T04:00:00Z",
"window": {
"startedAt": "2025-10-30T04:00:00Z",
"endedAt": "2025-10-31T04:00:00Z"
},
"riskScores": {
"overall": 0.62,
"exploitability": 0.74,
"impact": 0.51,
"epss98Percentile": 0.92,
"kevCount": 1
},
"exposure": {
"internetFacing": true,
"runtimeEnforced": false,
"criticality": "mission-critical",
"deployments": 48
},
"controls": {
"sbomAttested": true,
"vexCoverage": "partial",
"policyStatus": "fail",
"lastPolicyEvaluation": "2025-10-31T02:44:09Z"
},
"findings": [
{
"category": "vulnerability",
"severity": "critical",
"summary": "KEV-listed OpenSSL vulnerability present without compensating control.",
"detail": "CVE-2025-10001 remained open in production deployments for >24h.",
"evidence": [
"scan:CVE-2025-10001",
"policy:RULE-RUNTIME-001"
]
},
{
"category": "runtime",
"severity": "medium",
"summary": "No runtime admission control for critical namespaces.",
"detail": "Zastava webhook disabled on cluster prod-us-east due to maintenance.",
"evidence": [
"zastava:event:2025-10-30T21:41Z"
]
}
]
}

View File

@@ -0,0 +1,80 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.SBOMAttestation@1",
"subject": [
{
"subjectKind": "container-image",
"name": "registry.stella-ops.internal/policy/engine",
"digest": {
"sha256": "4d7c3a1b2f9e0d6c5b4a3f2e1d0c9b8a7766554433221100ffaabbccddeeff12"
},
"imageDigest": "sha256:4d7c3a1b2f9e0d6c5b4a3f2e1d0c9b8a7766554433221100ffaabbccddeeff12"
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:scanner",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "scanner-key-01",
"mode": "keyless",
"algorithm": "ecdsa-p256",
"issuer": "fulcio.internal",
"certificateChain": [
"-----BEGIN CERTIFICATE-----MIIB...==-----END CERTIFICATE-----"
]
}
},
"issuedAt": "2025-10-30T14:05:18Z",
"materials": [
{
"uri": "oci://registry.stella-ops.internal/scanner/sbom-indexer@sha256:1122aa55bb66cc77dd88ee99ff00112233445566778899aabbccddeeff001122",
"role": "scanner-runtime"
}
],
"transparency": [
{
"logId": "rekor-primary",
"logUrl": "https://rekor.stella-ops.internal",
"uuid": "11111111-2222-3333-4444-555555555555",
"index": 567890
}
],
"sbom": {
"format": "cyclonedx-json",
"specVersion": "1.6",
"digest": {
"sha256": "9a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9b8c7d6e5f4a3b2c1d0e9f8a7b"
},
"contentUri": "cas://sbom/blobs/9a7b6c5d4e3f2a1b0c9d8e7f6a5b4c3d2e1f0a9b8c7d6e5f4a3b2c1d0e9f8a7b",
"contentMediaType": "application/vnd.cyclonedx+json;version=1.6",
"sizeBytes": 48213,
"descriptor": {
"bomRef": "urn:uuid:fa8706c2-2d3e-4e74-bc3e-337ca0fdf2f7",
"componentName": "policy-engine",
"componentVersion": "1.12.0"
},
"componentCounts": {
"packages": 215,
"dependencies": 214,
"services": 0,
"vulnerabilities": 14
}
},
"coverage": {
"layers": [
"sha256:aa11bb22cc33dd44ee55ff66aa77bb88cc99ddeeff00112233445566778899aa",
"sha256:bb22cc33dd44ee55ff66aa77bb88cc99ddeeff00112233445566778899aabbcc"
],
"packagesIncluded": true,
"licenseScanEnabled": true
},
"generator": {
"name": "StellaOps Scanner",
"version": "2.4.3",
"buildId": "scanner-build-8897",
"configurationDigest": {
"sha256": "abc1239f7e6d5c4b3a29181706f5e4d3c2b1a0f99887766554433221100ffeedd"
}
}
}

View File

@@ -0,0 +1,126 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.ScanResults@1",
"subject": [
{
"subjectKind": "scan-report",
"name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba",
"digest": {
"sha256": "deafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeefdeafbeef"
},
"imageDigest": "sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba"
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:scanner.worker",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "scanner-worker-key",
"mode": "keyless",
"algorithm": "ed25519",
"issuer": "fulcio.internal"
}
},
"issuedAt": "2025-10-29T06:14:45Z",
"materials": [
{
"uri": "git+https://git.stella-ops.org/runtime/api.git@refs/tags/v3.14.0",
"role": "source"
}
],
"transparency": [
{
"logId": "rekor-primary",
"logUrl": "https://rekor.stella-ops.internal",
"uuid": "33333333-4444-5555-6666-777777777777",
"index": 778899
}
],
"scanner": {
"name": "StellaOps Scanner",
"version": "2.4.3",
"runId": "scan-20251029-0614",
"configurationDigest": {
"sha256": "f1c2d3e4a5b60718293a4b5c6d7e8f90123456789abcdef0123456789abcdef0"
},
"mode": "inventory"
},
"summary": {
"totalFindings": 6,
"newFindings": 2,
"kevFindings": 1,
"fixableFindings": 4,
"severityCounts": {
"critical": 1,
"high": 2,
"medium": 2,
"low": 1,
"informational": 0
}
},
"policyContext": {
"policyId": "default-runtime-policy",
"policyVersion": "42",
"mode": "enforce"
},
"findings": [
{
"vulnerabilityId": "CVE-2025-10001",
"severity": "critical",
"status": "detected",
"kev": true,
"package": {
"name": "openssl",
"version": "3.0.12-3.el9",
"purl": "pkg:rpm/redhat/openssl@3.0.12-3.el9",
"type": "rpm"
},
"fixedVersion": "3.0.13-1.el9",
"introducedIn": "sha256:aa99887766554433221100ffeeddccbbaa99887766554433221100ffeeddccbb",
"evidence": {
"source": "os-packages",
"paths": [
"/usr/lib64/libssl.so.3"
],
"callers": [
"policy-engine"
]
}
},
{
"vulnerabilityId": "GHSA-1234-abcd-5678",
"severity": "high",
"status": "detected",
"kev": false,
"package": {
"name": "lodash",
"version": "4.17.21",
"purl": "pkg:npm/lodash@4.17.21",
"type": "npm"
},
"fixedVersion": "4.17.22",
"evidence": {
"source": "application-lockfile",
"paths": [
"/app/package-lock.json"
]
},
"notes": "Used by metrics exporter."
},
{
"vulnerabilityId": "CVE-2024-50010",
"severity": "medium",
"status": "remediated",
"kev": false,
"package": {
"name": "glibc",
"version": "2.36-60.el9",
"purl": "pkg:rpm/redhat/glibc@2.36-60.el9",
"type": "rpm"
},
"fixedVersion": "2.36-62.el9",
"notes": "Patched in base image refresh."
}
]
}

View File

@@ -0,0 +1,75 @@
{
"schemaVersion": "1.0.0",
"predicateType": "StellaOps.VEXAttestation@1",
"subject": [
{
"subjectKind": "vex-statement",
"name": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba",
"digest": {
"sha256": "8f6e5d4c3b2a190817263544554433221100ffeeddaabbccddeeff0011223344"
}
}
],
"issuer": {
"issuerType": "service",
"id": "urn:stellaops:svc:excitor",
"tenantId": "tenant-alpha",
"signingKey": {
"keyId": "vex-service-key",
"mode": "kms",
"algorithm": "ed25519",
"issuer": "kms.attestor.internal"
}
},
"issuedAt": "2025-10-30T09:12:03Z",
"vexStandard": "openvex-1.0",
"generator": {
"name": "StellaOps Excititor",
"version": "1.8.0"
},
"statements": [
{
"id": "stmt-001",
"vulnerabilityId": "CVE-2025-10001",
"status": "not_affected",
"statementType": "analysis",
"timestamp": "2025-10-30T09:11:40Z",
"justification": "Component not present in the deployed runtime closure.",
"impactStatement": "The affected OpenSSL module is unused by the runtime API image entrypoint chain.",
"products": [
{
"productId": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4...",
"name": "runtime-api",
"version": "3.14.0",
"purl": "pkg:oci/runtime-api@sha256:d2c3b4a5f6e7d8c9b0a1f2e3d4c5b6a79876543210fedcba9876543210fedcba"
}
],
"supplier": {
"name": "StellaOps Runtime Guild",
"id": "urn:stellaops:guild:runtime"
},
"references": [
"https://kb.stella-ops.org/vex/CVE-2025-10001"
]
},
{
"id": "stmt-002",
"vulnerabilityId": "GHSA-1234-abcd-5678",
"status": "affected",
"statementType": "remediation",
"timestamp": "2025-10-30T09:11:55Z",
"impactStatement": "Lodash is present in the telemetry plug-in; exploitation requires UID 0 inside the container.",
"actionStatement": "Upgrade telemetry plug-in to v2.1.5 or apply policy waiver until patch window.",
"products": [
{
"productId": "registry.stella-ops.internal/runtime/api@sha256:d2c3b4...",
"name": "runtime-api",
"version": "3.14.0"
}
],
"references": [
"https://github.com/lodash/lodash/security/advisory"
]
}
]
}

View File

@@ -0,0 +1,3 @@
module github.com/stella-ops/attestor/types
go 1.22

View File

@@ -0,0 +1,628 @@
// Code generated by StellaOps.Attestor.Types.Generator. DO NOT EDIT.
package attesttypes
import (
"encoding/json"
"errors"
"fmt"
)
type FindingStatus string
const (
FindingStatusDetected FindingStatus = "detected"
FindingStatusConfirmed FindingStatus = "confirmed"
FindingStatusFixed FindingStatus = "fixed"
FindingStatusNotAffected FindingStatus = "not_affected"
)
func (v FindingStatus) Validate() error {
switch v {
case FindingStatusDetected, FindingStatusConfirmed, FindingStatusFixed, FindingStatusNotAffected:
return nil
default:
return fmt.Errorf("invalid value for FindingStatus: %s", string(v))
}
}
type PolicyEffect string
const (
PolicyEffectAllow PolicyEffect = "allow"
PolicyEffectDeny PolicyEffect = "deny"
PolicyEffectWarn PolicyEffect = "warn"
)
func (v PolicyEffect) Validate() error {
switch v {
case PolicyEffectAllow, PolicyEffectDeny, PolicyEffectWarn:
return nil
default:
return fmt.Errorf("invalid value for PolicyEffect: %s", string(v))
}
}
type PolicyOutcome string
const (
PolicyOutcomePass PolicyOutcome = "pass"
PolicyOutcomeFail PolicyOutcome = "fail"
PolicyOutcomeWaived PolicyOutcome = "waived"
)
func (v PolicyOutcome) Validate() error {
switch v {
case PolicyOutcomePass, PolicyOutcomeFail, PolicyOutcomeWaived:
return nil
default:
return fmt.Errorf("invalid value for PolicyOutcome: %s", string(v))
}
}
type RiskLevel string
const (
RiskLevelCritical RiskLevel = "critical"
RiskLevelHigh RiskLevel = "high"
RiskLevelMedium RiskLevel = "medium"
RiskLevelLow RiskLevel = "low"
RiskLevelInformational RiskLevel = "informational"
)
func (v RiskLevel) Validate() error {
switch v {
case RiskLevelCritical, RiskLevelHigh, RiskLevelMedium, RiskLevelLow, RiskLevelInformational:
return nil
default:
return fmt.Errorf("invalid value for RiskLevel: %s", string(v))
}
}
type SbomFormat string
const (
SbomFormatCycloneDx16 SbomFormat = "CycloneDX-1.6"
SbomFormatSbom300 SbomFormat = "SBOM-3.0.0"
)
func (v SbomFormat) Validate() error {
switch v {
case SbomFormatCycloneDx16, SbomFormatSbom300:
return nil
default:
return fmt.Errorf("invalid value for SbomFormat: %s", string(v))
}
}
type Severity string
const (
SeverityCritical Severity = "critical"
SeverityHigh Severity = "high"
SeverityMedium Severity = "medium"
SeverityLow Severity = "low"
SeverityInfo Severity = "info"
)
func (v Severity) Validate() error {
switch v {
case SeverityCritical, SeverityHigh, SeverityMedium, SeverityLow, SeverityInfo:
return nil
default:
return fmt.Errorf("invalid value for Severity: %s", string(v))
}
}
type VexStatus string
const (
VexStatusNotAffected VexStatus = "not_affected"
VexStatusAffected VexStatus = "affected"
VexStatusUnderInvestigation VexStatus = "under_investigation"
VexStatusFixed VexStatus = "fixed"
)
func (v VexStatus) Validate() error {
switch v {
case VexStatusNotAffected, VexStatusAffected, VexStatusUnderInvestigation, VexStatusFixed:
return nil
default:
return fmt.Errorf("invalid value for VexStatus: %s", string(v))
}
}
const BuildProvenanceSchemaVersion = "StellaOps.BuildProvenance@1"
const CustomEvidenceSchemaVersion = "StellaOps.CustomEvidence@1"
const PolicyEvaluationSchemaVersion = "StellaOps.PolicyEvaluation@1"
const RiskProfileEvidenceSchemaVersion = "StellaOps.RiskProfileEvidence@1"
const SbomAttestationSchemaVersion = "StellaOps.SBOMAttestation@1"
const ScanResultsSchemaVersion = "StellaOps.ScanResults@1"
const VexAttestationSchemaVersion = "StellaOps.VEXAttestation@1"
type BuildMetadata struct {
BuildStartedOn string `json:"buildStartedOn"`
BuildFinishedOn string `json:"buildFinishedOn"`
Reproducible *bool `json:"reproducible,omitempty"`
BuildInvocationId *string `json:"buildInvocationId,omitempty"`
}
func (value *BuildMetadata) Validate() error {
if value == nil {
return errors.New("BuildMetadata is nil")
}
return nil
}
type BuildProvenance struct {
SchemaVersion string `json:"schemaVersion"`
BuildType string `json:"buildType"`
Builder BuilderIdentity `json:"builder"`
Materials []MaterialReference `json:"materials"`
Metadata BuildMetadata `json:"metadata"`
Environment *EnvironmentMetadata `json:"environment,omitempty"`
}
func (value *BuildProvenance) Validate() error {
if value == nil {
return errors.New("BuildProvenance is nil")
}
if value.SchemaVersion != "StellaOps.BuildProvenance@1" {
return fmt.Errorf("BuildProvenance.SchemaVersion must equal StellaOps.BuildProvenance@1")
}
if err := value.Builder.Validate(); err != nil {
return fmt.Errorf("invalid BuildProvenance.Builder: %w", err)
}
if len(value.Materials) < 1 {
return fmt.Errorf("BuildProvenance.Materials must contain at least 1 item(s)")
}
for i := range value.Materials {
if err := value.Materials[i].Validate(); err != nil {
return fmt.Errorf("invalid BuildProvenance.Materials[%d]: %w", i, err)
}
}
if err := value.Metadata.Validate(); err != nil {
return fmt.Errorf("invalid BuildProvenance.Metadata: %w", err)
}
if value.Environment != nil {
if err := value.Environment.Validate(); err != nil {
return fmt.Errorf("invalid BuildProvenance.Environment: %w", err)
}
}
return nil
}
type BuilderIdentity struct {
Id string `json:"id"`
Version *string `json:"version,omitempty"`
Platform *string `json:"platform,omitempty"`
}
func (value *BuilderIdentity) Validate() error {
if value == nil {
return errors.New("BuilderIdentity is nil")
}
return nil
}
type CustomEvidence struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
Kind string `json:"kind"`
GeneratedAt string `json:"generatedAt"`
Properties []CustomProperty `json:"properties,omitempty"`
}
func (value *CustomEvidence) Validate() error {
if value == nil {
return errors.New("CustomEvidence is nil")
}
if value.SchemaVersion != "StellaOps.CustomEvidence@1" {
return fmt.Errorf("CustomEvidence.SchemaVersion must equal StellaOps.CustomEvidence@1")
}
for i := range value.Properties {
if err := value.Properties[i].Validate(); err != nil {
return fmt.Errorf("invalid CustomEvidence.Properties[%d]: %w", i, err)
}
}
return nil
}
type CustomProperty struct {
Key string `json:"key"`
Value string `json:"value"`
}
func (value *CustomProperty) Validate() error {
if value == nil {
return errors.New("CustomProperty is nil")
}
return nil
}
type DigestReference struct {
Algorithm string `json:"algorithm"`
Value string `json:"value"`
}
func (value *DigestReference) Validate() error {
if value == nil {
return errors.New("DigestReference is nil")
}
return nil
}
type EnvironmentMetadata struct {
Platform *string `json:"platform,omitempty"`
ImageDigest *DigestReference `json:"imageDigest,omitempty"`
}
func (value *EnvironmentMetadata) Validate() error {
if value == nil {
return errors.New("EnvironmentMetadata is nil")
}
if value.ImageDigest != nil {
if err := value.ImageDigest.Validate(); err != nil {
return fmt.Errorf("invalid EnvironmentMetadata.ImageDigest: %w", err)
}
}
return nil
}
type MaterialReference struct {
Uri string `json:"uri"`
Digests []DigestReference `json:"digests"`
Note *string `json:"note,omitempty"`
}
func (value *MaterialReference) Validate() error {
if value == nil {
return errors.New("MaterialReference is nil")
}
if len(value.Digests) < 1 {
return fmt.Errorf("MaterialReference.Digests must contain at least 1 item(s)")
}
for i := range value.Digests {
if err := value.Digests[i].Validate(); err != nil {
return fmt.Errorf("invalid MaterialReference.Digests[%d]: %w", i, err)
}
}
return nil
}
type PolicyDecision struct {
PolicyId string `json:"policyId"`
RuleId string `json:"ruleId"`
Effect PolicyEffect `json:"effect"`
Reason *string `json:"reason,omitempty"`
Remediation *string `json:"remediation,omitempty"`
}
func (value *PolicyDecision) Validate() error {
if value == nil {
return errors.New("PolicyDecision is nil")
}
if err := value.Effect.Validate(); err != nil {
return fmt.Errorf("invalid PolicyDecision.Effect: %w", err)
}
return nil
}
type PolicyEvaluation struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
PolicyVersion string `json:"policyVersion"`
EvaluatedAt string `json:"evaluatedAt"`
Outcome PolicyOutcome `json:"outcome"`
Decisions []PolicyDecision `json:"decisions"`
}
func (value *PolicyEvaluation) Validate() error {
if value == nil {
return errors.New("PolicyEvaluation is nil")
}
if value.SchemaVersion != "StellaOps.PolicyEvaluation@1" {
return fmt.Errorf("PolicyEvaluation.SchemaVersion must equal StellaOps.PolicyEvaluation@1")
}
if err := value.Outcome.Validate(); err != nil {
return fmt.Errorf("invalid PolicyEvaluation.Outcome: %w", err)
}
for i := range value.Decisions {
if err := value.Decisions[i].Validate(); err != nil {
return fmt.Errorf("invalid PolicyEvaluation.Decisions[%d]: %w", i, err)
}
}
return nil
}
type RiskFactor struct {
Name string `json:"name"`
Weight float64 `json:"weight"`
Description *string `json:"description,omitempty"`
}
func (value *RiskFactor) Validate() error {
if value == nil {
return errors.New("RiskFactor is nil")
}
if value.Weight < 0 {
return fmt.Errorf("RiskFactor.Weight must be >= 0")
}
if value.Weight > 1 {
return fmt.Errorf("RiskFactor.Weight must be <= 1")
}
return nil
}
type RiskProfileEvidence struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
GeneratedAt string `json:"generatedAt"`
RiskScore float64 `json:"riskScore"`
RiskLevel RiskLevel `json:"riskLevel"`
Factors []RiskFactor `json:"factors"`
}
func (value *RiskProfileEvidence) Validate() error {
if value == nil {
return errors.New("RiskProfileEvidence is nil")
}
if value.SchemaVersion != "StellaOps.RiskProfileEvidence@1" {
return fmt.Errorf("RiskProfileEvidence.SchemaVersion must equal StellaOps.RiskProfileEvidence@1")
}
if value.RiskScore < 0 {
return fmt.Errorf("RiskProfileEvidence.RiskScore must be >= 0")
}
if value.RiskScore > 100 {
return fmt.Errorf("RiskProfileEvidence.RiskScore must be <= 100")
}
if err := value.RiskLevel.Validate(); err != nil {
return fmt.Errorf("invalid RiskProfileEvidence.RiskLevel: %w", err)
}
for i := range value.Factors {
if err := value.Factors[i].Validate(); err != nil {
return fmt.Errorf("invalid RiskProfileEvidence.Factors[%d]: %w", i, err)
}
}
return nil
}
type SbomAttestation struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
SbomFormat SbomFormat `json:"sbomFormat"`
SbomDigest DigestReference `json:"sbomDigest"`
SbomUri *string `json:"sbomUri,omitempty"`
ComponentCount float64 `json:"componentCount"`
Packages []SbomPackage `json:"packages,omitempty"`
}
func (value *SbomAttestation) Validate() error {
if value == nil {
return errors.New("SbomAttestation is nil")
}
if value.SchemaVersion != "StellaOps.SBOMAttestation@1" {
return fmt.Errorf("SbomAttestation.SchemaVersion must equal StellaOps.SBOMAttestation@1")
}
if err := value.SbomFormat.Validate(); err != nil {
return fmt.Errorf("invalid SbomAttestation.SbomFormat: %w", err)
}
if err := value.SbomDigest.Validate(); err != nil {
return fmt.Errorf("invalid SbomAttestation.SbomDigest: %w", err)
}
if value.ComponentCount < 0 {
return fmt.Errorf("SbomAttestation.ComponentCount must be >= 0")
}
for i := range value.Packages {
if err := value.Packages[i].Validate(); err != nil {
return fmt.Errorf("invalid SbomAttestation.Packages[%d]: %w", i, err)
}
}
return nil
}
type SbomPackage struct {
Purl string `json:"purl"`
Version *string `json:"version,omitempty"`
Licenses []string `json:"licenses,omitempty"`
}
func (value *SbomPackage) Validate() error {
if value == nil {
return errors.New("SbomPackage is nil")
}
if len(value.Licenses) < 1 {
return fmt.Errorf("SbomPackage.Licenses must contain at least 1 item(s)")
}
return nil
}
type ScanFinding struct {
Id string `json:"id"`
Severity Severity `json:"severity"`
Status FindingStatus `json:"status"`
PackageName string `json:"packageName"`
PackageVersion *string `json:"packageVersion,omitempty"`
CvssScore *float64 `json:"cvssScore,omitempty"`
Description *string `json:"description,omitempty"`
References []string `json:"references,omitempty"`
}
func (value *ScanFinding) Validate() error {
if value == nil {
return errors.New("ScanFinding is nil")
}
if err := value.Severity.Validate(); err != nil {
return fmt.Errorf("invalid ScanFinding.Severity: %w", err)
}
if err := value.Status.Validate(); err != nil {
return fmt.Errorf("invalid ScanFinding.Status: %w", err)
}
if value.CvssScore != nil {
if *value.CvssScore < 0 {
return fmt.Errorf("ScanFinding.CvssScore must be >= 0")
}
if *value.CvssScore > 10 {
return fmt.Errorf("ScanFinding.CvssScore must be <= 10")
}
}
if len(value.References) < 1 {
return fmt.Errorf("ScanFinding.References must contain at least 1 item(s)")
}
return nil
}
type ScanResults struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
ScannerName string `json:"scannerName"`
ScannerVersion string `json:"scannerVersion"`
GeneratedAt string `json:"generatedAt"`
Findings []ScanFinding `json:"findings"`
}
func (value *ScanResults) Validate() error {
if value == nil {
return errors.New("ScanResults is nil")
}
if value.SchemaVersion != "StellaOps.ScanResults@1" {
return fmt.Errorf("ScanResults.SchemaVersion must equal StellaOps.ScanResults@1")
}
for i := range value.Findings {
if err := value.Findings[i].Validate(); err != nil {
return fmt.Errorf("invalid ScanResults.Findings[%d]: %w", i, err)
}
}
return nil
}
type VexAttestation struct {
SchemaVersion string `json:"schemaVersion"`
SubjectDigest string `json:"subjectDigest"`
GeneratedAt string `json:"generatedAt"`
Statements []VexStatement `json:"statements"`
}
func (value *VexAttestation) Validate() error {
if value == nil {
return errors.New("VexAttestation is nil")
}
if value.SchemaVersion != "StellaOps.VEXAttestation@1" {
return fmt.Errorf("VexAttestation.SchemaVersion must equal StellaOps.VEXAttestation@1")
}
if len(value.Statements) < 1 {
return fmt.Errorf("VexAttestation.Statements must contain at least 1 item(s)")
}
for i := range value.Statements {
if err := value.Statements[i].Validate(); err != nil {
return fmt.Errorf("invalid VexAttestation.Statements[%d]: %w", i, err)
}
}
return nil
}
type VexStatement struct {
VulnerabilityId string `json:"vulnerabilityId"`
Status VexStatus `json:"status"`
Timestamp string `json:"timestamp"`
Justification *string `json:"justification,omitempty"`
ImpactStatement *string `json:"impactStatement,omitempty"`
ActionStatement *string `json:"actionStatement,omitempty"`
References []string `json:"references,omitempty"`
}
func (value *VexStatement) Validate() error {
if value == nil {
return errors.New("VexStatement is nil")
}
if err := value.Status.Validate(); err != nil {
return fmt.Errorf("invalid VexStatement.Status: %w", err)
}
if len(value.References) < 1 {
return fmt.Errorf("VexStatement.References must contain at least 1 item(s)")
}
return nil
}
func (value *BuildProvenance) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal BuildProvenance: %w", err)
}
return buf, nil
}
func (value *CustomEvidence) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal CustomEvidence: %w", err)
}
return buf, nil
}
func (value *PolicyEvaluation) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal PolicyEvaluation: %w", err)
}
return buf, nil
}
func (value *RiskProfileEvidence) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal RiskProfileEvidence: %w", err)
}
return buf, nil
}
func (value *SbomAttestation) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal SbomAttestation: %w", err)
}
return buf, nil
}
func (value *ScanResults) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal ScanResults: %w", err)
}
return buf, nil
}
func (value *VexAttestation) CanonicalJSON() ([]byte, error) {
if err := value.Validate(); err != nil {
return nil, err
}
buf, err := json.Marshal(value)
if err != nil {
return nil, fmt.Errorf("failed to marshal VexAttestation: %w", err)
}
return buf, nil
}

View File

@@ -0,0 +1,239 @@
package attesttypes
import (
"fmt"
"testing"
)
func hexString(ch byte) string {
buf := make([]byte, 64)
for i := range buf {
buf[i] = ch
}
return string(buf)
}
func sampleBuildProvenance() BuildProvenance {
return BuildProvenance{
SchemaVersion: BuildProvenanceSchemaVersion,
BuildType: "docker/buildx",
Builder: BuilderIdentity{
Id: "builder://stellaops/ci",
Version: stringPtr("2025.10.31"),
Platform: stringPtr("linux/amd64"),
},
Materials: []MaterialReference{
{
Uri: "git+https://git.stella-ops.org/org/repo@refs/heads/main",
Digests: []DigestReference{
{Algorithm: "sha256", Value: hexString('a')},
},
Note: stringPtr("Source repository"),
},
},
Metadata: BuildMetadata{
BuildStartedOn: "2025-10-31T12:00:00Z",
BuildFinishedOn: "2025-10-31T12:05:00Z",
Reproducible: boolPtr(true),
BuildInvocationId: stringPtr("invocations/123"),
},
Environment: &EnvironmentMetadata{
Platform: stringPtr("linux/amd64"),
ImageDigest: &DigestReference{Algorithm: "sha256", Value: hexString('b')},
},
}
}
func sampleSbomAttestation() SbomAttestation {
return SbomAttestation{
SchemaVersion: SbomAttestationSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('c')),
SbomFormat: "CycloneDX-1.6",
SbomDigest: DigestReference{Algorithm: "sha256", Value: hexString('d')},
SbomUri: stringPtr("https://example.invalid/sbom.json"),
ComponentCount: 2,
Packages: []SbomPackage{
{
Purl: "pkg:npm/%40stellaops/example@1.0.0",
Version: stringPtr("1.0.0"),
Licenses: []string{"MIT"},
},
},
}
}
func sampleVexAttestation() VexAttestation {
return VexAttestation{
SchemaVersion: VexAttestationSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('e')),
GeneratedAt: "2025-10-31T12:10:00Z",
Statements: []VexStatement{
{
VulnerabilityId: "CVE-2025-1234",
Status: VexStatusNotAffected,
Timestamp: "2025-10-31T12:10:00Z",
Justification: stringPtr("component_not_present"),
References: []string{"https://example.invalid/advisory"},
},
},
}
}
func sampleScanResults() ScanResults {
return ScanResults{
SchemaVersion: ScanResultsSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('f')),
ScannerName: "stellaops/scanner",
ScannerVersion: "2025.10.31",
GeneratedAt: "2025-10-31T12:15:00Z",
Findings: []ScanFinding{
{
Id: "FIND-001",
Severity: SeverityMedium,
Status: FindingStatusDetected,
PackageName: "libexample",
PackageVersion: stringPtr("1.2.3"),
CvssScore: floatPtr(7.5),
Description: stringPtr("Example vulnerability"),
References: []string{"https://example.invalid/CVE-2025-1234"},
},
},
}
}
func samplePolicyEvaluation() PolicyEvaluation {
return PolicyEvaluation{
SchemaVersion: PolicyEvaluationSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('1')),
PolicyVersion: "2025.10.31",
EvaluatedAt: "2025-10-31T12:18:00Z",
Outcome: PolicyOutcomePass,
Decisions: []PolicyDecision{
{
PolicyId: "policy/access-control",
RuleId: "rule/allow-latest",
Effect: PolicyEffectAllow,
Reason: stringPtr("No blocking findings"),
},
},
}
}
func sampleRiskProfile() RiskProfileEvidence {
return RiskProfileEvidence{
SchemaVersion: RiskProfileEvidenceSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('2')),
GeneratedAt: "2025-10-31T12:20:00Z",
RiskScore: 42.5,
RiskLevel: RiskLevelMedium,
Factors: []RiskFactor{
{
Name: "exploitability",
Weight: 0.6,
Description: stringPtr("No known exploits published"),
},
},
}
}
func sampleCustomEvidence() CustomEvidence {
return CustomEvidence{
SchemaVersion: CustomEvidenceSchemaVersion,
SubjectDigest: fmt.Sprintf("sha256:%s", hexString('3')),
Kind: "org.stellaops.demo/custom",
GeneratedAt: "2025-10-31T12:25:00Z",
Properties: []CustomProperty{
{Key: "note", Value: "Custom attestation payload"},
},
}
}
func TestBuildProvenanceRoundTrip(t *testing.T) {
sample := sampleBuildProvenance()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
a, err := sample.CanonicalJSON()
if err != nil {
t.Fatalf("canonical JSON: %v", err)
}
b, err := sample.CanonicalJSON()
if err != nil {
t.Fatalf("canonical JSON repeat: %v", err)
}
if string(a) != string(b) {
t.Fatalf("canonical output mismatch:\n%s\n%s", string(a), string(b))
}
}
func TestSbomAttestationRoundTrip(t *testing.T) {
sample := sampleSbomAttestation()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestVexAttestationRoundTrip(t *testing.T) {
sample := sampleVexAttestation()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestScanResultsRoundTrip(t *testing.T) {
sample := sampleScanResults()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestPolicyEvaluationRoundTrip(t *testing.T) {
sample := samplePolicyEvaluation()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestRiskProfileRoundTrip(t *testing.T) {
sample := sampleRiskProfile()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestCustomEvidenceRoundTrip(t *testing.T) {
sample := sampleCustomEvidence()
if err := sample.Validate(); err != nil {
t.Fatalf("validate: %v", err)
}
if _, err := sample.CanonicalJSON(); err != nil {
t.Fatalf("canonical JSON: %v", err)
}
}
func TestCustomEvidenceSchemaVersionMismatch(t *testing.T) {
sample := sampleCustomEvidence()
sample.SchemaVersion = "StellaOps.CustomEvidence@9"
if err := sample.Validate(); err == nil {
t.Fatal("expected schemaVersion mismatch to fail validation")
}
}
func stringPtr(v string) *string { return &v }
func boolPtr(v bool) *bool { return &v }
func floatPtr(v float64) *float64 { return &v }

View File

@@ -0,0 +1,195 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import {
canonicalizeBuildProvenance,
canonicalizeCustomEvidence,
canonicalizePolicyEvaluation,
canonicalizeRiskProfileEvidence,
canonicalizeSbomAttestation,
canonicalizeScanResults,
canonicalizeVexAttestation,
validateBuildProvenance,
validateCustomEvidence,
validatePolicyEvaluation,
validateRiskProfileEvidence,
validateSbomAttestation,
validateScanResults,
validateVexAttestation
} from './index.js';
const hex = (char: string): string => char.repeat(64);
const buildProvenanceSample = {
schemaVersion: 'StellaOps.BuildProvenance@1' as const,
buildType: 'docker/buildx',
builder: {
id: 'builder://stellaops/ci',
version: '2025.10.31',
platform: 'linux/amd64'
},
materials: [
{
uri: 'git+https://git.stella-ops.org/org/repo@refs/heads/main',
digests: [
{ algorithm: 'sha256', value: hex('a') }
],
note: 'Source repository'
}
],
metadata: {
buildStartedOn: '2025-10-31T12:00:00Z',
buildFinishedOn: '2025-10-31T12:05:00Z',
reproducible: true,
buildInvocationId: 'invocations/123'
},
environment: {
platform: 'linux/amd64',
imageDigest: { algorithm: 'sha256', value: hex('b') }
}
};
const sbomAttestationSample = {
schemaVersion: 'StellaOps.SBOMAttestation@1' as const,
subjectDigest: `sha256:${hex('c')}`,
sbomFormat: 'CycloneDX-1.6' as const,
sbomDigest: { algorithm: 'sha256', value: hex('d') },
sbomUri: 'https://example.invalid/sbom.json',
componentCount: 2,
packages: [
{
purl: 'pkg:npm/%40stellaops/example@1.0.0',
version: '1.0.0',
licenses: ['MIT']
}
]
};
const vexAttestationSample = {
schemaVersion: 'StellaOps.VEXAttestation@1' as const,
subjectDigest: `sha256:${hex('e')}`,
generatedAt: '2025-10-31T12:10:00Z',
statements: [
{
vulnerabilityId: 'CVE-2025-1234',
status: 'not_affected' as const,
timestamp: '2025-10-31T12:10:00Z',
justification: 'component_not_present',
references: ['https://example.invalid/advisory']
}
]
};
const scanResultsSample = {
schemaVersion: 'StellaOps.ScanResults@1' as const,
subjectDigest: `sha256:${hex('f')}`,
scannerName: 'stellaops/scanner',
scannerVersion: '2025.10.31',
generatedAt: '2025-10-31T12:15:00Z',
findings: [
{
id: 'FIND-001',
severity: 'medium' as const,
status: 'detected' as const,
packageName: 'libexample',
packageVersion: '1.2.3',
cvssScore: 7.5,
description: 'Example vulnerability',
references: ['https://example.invalid/CVE-2025-1234']
}
]
};
const policyEvaluationSample = {
schemaVersion: 'StellaOps.PolicyEvaluation@1' as const,
subjectDigest: `sha256:${hex('1')}`,
policyVersion: '2025.10.31',
evaluatedAt: '2025-10-31T12:18:00Z',
outcome: 'pass' as const,
decisions: [
{
policyId: 'policy/access-control',
ruleId: 'rule/allow-latest',
effect: 'allow' as const,
reason: 'No blocking findings'
}
]
};
const riskProfileSample = {
schemaVersion: 'StellaOps.RiskProfileEvidence@1' as const,
subjectDigest: `sha256:${hex('2')}`,
generatedAt: '2025-10-31T12:20:00Z',
riskScore: 42.5,
riskLevel: 'medium' as const,
factors: [
{
name: 'exploitability',
weight: 0.6,
description: 'No known exploits published'
}
]
};
const customEvidenceSample = {
schemaVersion: 'StellaOps.CustomEvidence@1' as const,
subjectDigest: `sha256:${hex('3')}`,
kind: 'org.stellaops.demo/custom',
generatedAt: '2025-10-31T12:25:00Z',
properties: [
{ key: 'note', value: 'Custom attestation payload' }
]
};
test('BuildProvenance round-trip', () => {
const validated = validateBuildProvenance(structuredClone(buildProvenanceSample));
assert.deepEqual(validated, buildProvenanceSample);
const canonical = canonicalizeBuildProvenance(buildProvenanceSample);
assert.equal(canonical, canonicalizeBuildProvenance(structuredClone(buildProvenanceSample)));
});
test('BuildProvenance validation failure on missing materials', () => {
const invalid = structuredClone(buildProvenanceSample);
(invalid.materials as unknown[]) = [];
assert.throws(() => validateBuildProvenance(invalid as unknown), /must contain at least 1 item/);
});
test('SBOMAttestation round-trip', () => {
const validated = validateSbomAttestation(structuredClone(sbomAttestationSample));
assert.deepEqual(validated, sbomAttestationSample);
assert.equal(canonicalizeSbomAttestation(sbomAttestationSample), canonicalizeSbomAttestation(structuredClone(sbomAttestationSample)));
});
test('VexAttestation round-trip', () => {
const validated = validateVexAttestation(structuredClone(vexAttestationSample));
assert.deepEqual(validated, vexAttestationSample);
assert.equal(canonicalizeVexAttestation(vexAttestationSample), canonicalizeVexAttestation(structuredClone(vexAttestationSample)));
});
test('ScanResults round-trip', () => {
const validated = validateScanResults(structuredClone(scanResultsSample));
assert.deepEqual(validated, scanResultsSample);
assert.equal(canonicalizeScanResults(scanResultsSample), canonicalizeScanResults(structuredClone(scanResultsSample)));
});
test('PolicyEvaluation round-trip', () => {
const validated = validatePolicyEvaluation(structuredClone(policyEvaluationSample));
assert.deepEqual(validated, policyEvaluationSample);
assert.equal(canonicalizePolicyEvaluation(policyEvaluationSample), canonicalizePolicyEvaluation(structuredClone(policyEvaluationSample)));
});
test('RiskProfileEvidence round-trip', () => {
const validated = validateRiskProfileEvidence(structuredClone(riskProfileSample));
assert.deepEqual(validated, riskProfileSample);
assert.equal(canonicalizeRiskProfileEvidence(riskProfileSample), canonicalizeRiskProfileEvidence(structuredClone(riskProfileSample)));
});
test('CustomEvidence round-trip', () => {
const validated = validateCustomEvidence(structuredClone(customEvidenceSample));
assert.deepEqual(validated, customEvidenceSample);
assert.equal(canonicalizeCustomEvidence(customEvidenceSample), canonicalizeCustomEvidence(structuredClone(customEvidenceSample)));
});
test('CustomEvidence fails when schema version mismatches', () => {
const invalid = { ...customEvidenceSample, schemaVersion: 'StellaOps.CustomEvidence@9' as const };
assert.throws(() => validateCustomEvidence(invalid as unknown), /must equal 'StellaOps.CustomEvidence@1'/);
});

View File

@@ -0,0 +1,945 @@
// <auto-generated />
// Generated by StellaOps.Attestor.Types.Generator
/* eslint-disable */
/* prettier-ignore */
export const FindingStatusValues = Object.freeze(['detected', 'confirmed', 'fixed', 'not_affected'] as const);
export type FindingStatus = typeof FindingStatusValues[number];
export const PolicyEffectValues = Object.freeze(['allow', 'deny', 'warn'] as const);
export type PolicyEffect = typeof PolicyEffectValues[number];
export const PolicyOutcomeValues = Object.freeze(['pass', 'fail', 'waived'] as const);
export type PolicyOutcome = typeof PolicyOutcomeValues[number];
export const RiskLevelValues = Object.freeze(['critical', 'high', 'medium', 'low', 'informational'] as const);
export type RiskLevel = typeof RiskLevelValues[number];
export const SbomFormatValues = Object.freeze(['CycloneDX-1.6', 'SBOM-3.0.0'] as const);
export type SbomFormat = typeof SbomFormatValues[number];
export const SeverityValues = Object.freeze(['critical', 'high', 'medium', 'low', 'info'] as const);
export type Severity = typeof SeverityValues[number];
export const VexStatusValues = Object.freeze(['not_affected', 'affected', 'under_investigation', 'fixed'] as const);
export type VexStatus = typeof VexStatusValues[number];
export interface BuildMetadata {
buildStartedOn: string;
buildFinishedOn: string;
reproducible?: boolean;
buildInvocationId?: string;
}
export interface BuildProvenance {
schemaVersion: 'StellaOps.BuildProvenance@1';
buildType: string;
builder: BuilderIdentity;
materials: Array<MaterialReference>;
metadata: BuildMetadata;
environment?: EnvironmentMetadata;
}
export interface BuilderIdentity {
id: string;
version?: string;
platform?: string;
}
export interface CustomEvidence {
schemaVersion: 'StellaOps.CustomEvidence@1';
subjectDigest: string;
kind: string;
generatedAt: string;
properties?: Array<CustomProperty>;
}
export interface CustomProperty {
key: string;
value: string;
}
export interface DigestReference {
algorithm: string;
value: string;
}
export interface EnvironmentMetadata {
platform?: string;
imageDigest?: DigestReference;
}
export interface MaterialReference {
uri: string;
digests: Array<DigestReference>;
note?: string;
}
export interface PolicyDecision {
policyId: string;
ruleId: string;
effect: PolicyEffect;
reason?: string;
remediation?: string;
}
export interface PolicyEvaluation {
schemaVersion: 'StellaOps.PolicyEvaluation@1';
subjectDigest: string;
policyVersion: string;
evaluatedAt: string;
outcome: PolicyOutcome;
decisions: Array<PolicyDecision>;
}
export interface RiskFactor {
name: string;
weight: number;
description?: string;
}
export interface RiskProfileEvidence {
schemaVersion: 'StellaOps.RiskProfileEvidence@1';
subjectDigest: string;
generatedAt: string;
riskScore: number;
riskLevel: RiskLevel;
factors: Array<RiskFactor>;
}
export interface SbomAttestation {
schemaVersion: 'StellaOps.SBOMAttestation@1';
subjectDigest: string;
sbomFormat: SbomFormat;
sbomDigest: DigestReference;
sbomUri?: string;
componentCount: number;
packages?: Array<SbomPackage>;
}
export interface SbomPackage {
purl: string;
version?: string;
licenses?: Array<string>;
}
export interface ScanFinding {
id: string;
severity: Severity;
status: FindingStatus;
packageName: string;
packageVersion?: string;
cvssScore?: number;
description?: string;
references?: Array<string>;
}
export interface ScanResults {
schemaVersion: 'StellaOps.ScanResults@1';
subjectDigest: string;
scannerName: string;
scannerVersion: string;
generatedAt: string;
findings: Array<ScanFinding>;
}
export interface VexAttestation {
schemaVersion: 'StellaOps.VEXAttestation@1';
subjectDigest: string;
generatedAt: string;
statements: Array<VexStatement>;
}
export interface VexStatement {
vulnerabilityId: string;
status: VexStatus;
timestamp: string;
justification?: string;
impactStatement?: string;
actionStatement?: string;
references?: Array<string>;
}
function isRecord(value: unknown): value is Record<string, unknown> {
return typeof value === 'object' && value !== null && !Array.isArray(value);
}
function pathString(path: string[]): string {
return path.length === 0 ? 'value' : `value.${path.join('.')}`;
}
function assertBuildMetadata(value: unknown, path: string[]): asserts value is BuildMetadata {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.buildStartedOn === undefined) {
throw new Error(`${pathString([...path, 'buildStartedOn'])} is required.`);
}
if (typeof value.buildStartedOn !== 'string') {
throw new Error(`${pathString([...path, 'buildStartedOn'])} must be a string.`);
}
if (value.buildFinishedOn === undefined) {
throw new Error(`${pathString([...path, 'buildFinishedOn'])} is required.`);
}
if (typeof value.buildFinishedOn !== 'string') {
throw new Error(`${pathString([...path, 'buildFinishedOn'])} must be a string.`);
}
if (value.reproducible !== undefined) {
if (typeof value.reproducible !== 'boolean') {
throw new Error(`${pathString([...path, 'reproducible'])} must be a boolean.`);
}
}
if (value.buildInvocationId !== undefined) {
if (typeof value.buildInvocationId !== 'string') {
throw new Error(`${pathString([...path, 'buildInvocationId'])} must be a string.`);
}
}
}
function assertBuildProvenance(value: unknown, path: string[]): asserts value is BuildProvenance {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.BuildProvenance@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.BuildProvenance@1'.`);
}
if (value.buildType === undefined) {
throw new Error(`${pathString([...path, 'buildType'])} is required.`);
}
if (typeof value.buildType !== 'string') {
throw new Error(`${pathString([...path, 'buildType'])} must be a string.`);
}
if (value.builder === undefined) {
throw new Error(`${pathString([...path, 'builder'])} is required.`);
}
assertBuilderIdentity(value.builder, [...path, 'builder']);
if (value.materials === undefined) {
throw new Error(`${pathString([...path, 'materials'])} is required.`);
}
if (!Array.isArray(value.materials)) {
throw new Error(`${pathString([...path, 'materials'])} must be an array.`);
}
if (value.materials.length < 1) {
throw new Error(`${pathString([...path, 'materials'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.materials.length; i += 1) {
assertMaterialReference(value.materials[i], [...[...path, 'materials'], String(i)]);
}
if (value.metadata === undefined) {
throw new Error(`${pathString([...path, 'metadata'])} is required.`);
}
assertBuildMetadata(value.metadata, [...path, 'metadata']);
if (value.environment !== undefined) {
assertEnvironmentMetadata(value.environment, [...path, 'environment']);
}
}
function assertBuilderIdentity(value: unknown, path: string[]): asserts value is BuilderIdentity {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.id === undefined) {
throw new Error(`${pathString([...path, 'id'])} is required.`);
}
if (typeof value.id !== 'string') {
throw new Error(`${pathString([...path, 'id'])} must be a string.`);
}
if (value.version !== undefined) {
if (typeof value.version !== 'string') {
throw new Error(`${pathString([...path, 'version'])} must be a string.`);
}
}
if (value.platform !== undefined) {
if (typeof value.platform !== 'string') {
throw new Error(`${pathString([...path, 'platform'])} must be a string.`);
}
}
}
function assertCustomEvidence(value: unknown, path: string[]): asserts value is CustomEvidence {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.CustomEvidence@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.CustomEvidence@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.kind === undefined) {
throw new Error(`${pathString([...path, 'kind'])} is required.`);
}
if (typeof value.kind !== 'string') {
throw new Error(`${pathString([...path, 'kind'])} must be a string.`);
}
if (value.generatedAt === undefined) {
throw new Error(`${pathString([...path, 'generatedAt'])} is required.`);
}
if (typeof value.generatedAt !== 'string') {
throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`);
}
if (value.properties !== undefined) {
if (!Array.isArray(value.properties)) {
throw new Error(`${pathString([...path, 'properties'])} must be an array.`);
}
for (let i = 0; i < value.properties.length; i += 1) {
assertCustomProperty(value.properties[i], [...[...path, 'properties'], String(i)]);
}
}
}
function assertCustomProperty(value: unknown, path: string[]): asserts value is CustomProperty {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.key === undefined) {
throw new Error(`${pathString([...path, 'key'])} is required.`);
}
if (typeof value.key !== 'string') {
throw new Error(`${pathString([...path, 'key'])} must be a string.`);
}
if (value.value === undefined) {
throw new Error(`${pathString([...path, 'value'])} is required.`);
}
if (typeof value.value !== 'string') {
throw new Error(`${pathString([...path, 'value'])} must be a string.`);
}
}
function assertDigestReference(value: unknown, path: string[]): asserts value is DigestReference {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.algorithm === undefined) {
throw new Error(`${pathString([...path, 'algorithm'])} is required.`);
}
if (typeof value.algorithm !== 'string') {
throw new Error(`${pathString([...path, 'algorithm'])} must be a string.`);
}
if (value.value === undefined) {
throw new Error(`${pathString([...path, 'value'])} is required.`);
}
if (typeof value.value !== 'string') {
throw new Error(`${pathString([...path, 'value'])} must be a string.`);
}
if (!/^[A-Fa-f0-9]{64}$/.test(value.value)) {
throw new Error(`${pathString([...path, 'value'])} does not match expected format.`);
}
}
function assertEnvironmentMetadata(value: unknown, path: string[]): asserts value is EnvironmentMetadata {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.platform !== undefined) {
if (typeof value.platform !== 'string') {
throw new Error(`${pathString([...path, 'platform'])} must be a string.`);
}
}
if (value.imageDigest !== undefined) {
assertDigestReference(value.imageDigest, [...path, 'imageDigest']);
}
}
function assertMaterialReference(value: unknown, path: string[]): asserts value is MaterialReference {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.uri === undefined) {
throw new Error(`${pathString([...path, 'uri'])} is required.`);
}
if (typeof value.uri !== 'string') {
throw new Error(`${pathString([...path, 'uri'])} must be a string.`);
}
if (value.digests === undefined) {
throw new Error(`${pathString([...path, 'digests'])} is required.`);
}
if (!Array.isArray(value.digests)) {
throw new Error(`${pathString([...path, 'digests'])} must be an array.`);
}
if (value.digests.length < 1) {
throw new Error(`${pathString([...path, 'digests'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.digests.length; i += 1) {
assertDigestReference(value.digests[i], [...[...path, 'digests'], String(i)]);
}
if (value.note !== undefined) {
if (typeof value.note !== 'string') {
throw new Error(`${pathString([...path, 'note'])} must be a string.`);
}
}
}
function assertPolicyDecision(value: unknown, path: string[]): asserts value is PolicyDecision {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.policyId === undefined) {
throw new Error(`${pathString([...path, 'policyId'])} is required.`);
}
if (typeof value.policyId !== 'string') {
throw new Error(`${pathString([...path, 'policyId'])} must be a string.`);
}
if (value.ruleId === undefined) {
throw new Error(`${pathString([...path, 'ruleId'])} is required.`);
}
if (typeof value.ruleId !== 'string') {
throw new Error(`${pathString([...path, 'ruleId'])} must be a string.`);
}
if (value.effect === undefined) {
throw new Error(`${pathString([...path, 'effect'])} is required.`);
}
if (!PolicyEffectValues.includes(value.effect as PolicyEffect)) {
throw new Error(`${pathString([...path, 'effect'])} must be one of ${PolicyEffectValues.join(', ')}`);
}
if (value.reason !== undefined) {
if (typeof value.reason !== 'string') {
throw new Error(`${pathString([...path, 'reason'])} must be a string.`);
}
}
if (value.remediation !== undefined) {
if (typeof value.remediation !== 'string') {
throw new Error(`${pathString([...path, 'remediation'])} must be a string.`);
}
}
}
function assertPolicyEvaluation(value: unknown, path: string[]): asserts value is PolicyEvaluation {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.PolicyEvaluation@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.PolicyEvaluation@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.policyVersion === undefined) {
throw new Error(`${pathString([...path, 'policyVersion'])} is required.`);
}
if (typeof value.policyVersion !== 'string') {
throw new Error(`${pathString([...path, 'policyVersion'])} must be a string.`);
}
if (value.evaluatedAt === undefined) {
throw new Error(`${pathString([...path, 'evaluatedAt'])} is required.`);
}
if (typeof value.evaluatedAt !== 'string') {
throw new Error(`${pathString([...path, 'evaluatedAt'])} must be a string.`);
}
if (value.outcome === undefined) {
throw new Error(`${pathString([...path, 'outcome'])} is required.`);
}
if (!PolicyOutcomeValues.includes(value.outcome as PolicyOutcome)) {
throw new Error(`${pathString([...path, 'outcome'])} must be one of ${PolicyOutcomeValues.join(', ')}`);
}
if (value.decisions === undefined) {
throw new Error(`${pathString([...path, 'decisions'])} is required.`);
}
if (!Array.isArray(value.decisions)) {
throw new Error(`${pathString([...path, 'decisions'])} must be an array.`);
}
for (let i = 0; i < value.decisions.length; i += 1) {
assertPolicyDecision(value.decisions[i], [...[...path, 'decisions'], String(i)]);
}
}
function assertRiskFactor(value: unknown, path: string[]): asserts value is RiskFactor {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.name === undefined) {
throw new Error(`${pathString([...path, 'name'])} is required.`);
}
if (typeof value.name !== 'string') {
throw new Error(`${pathString([...path, 'name'])} must be a string.`);
}
if (value.weight === undefined) {
throw new Error(`${pathString([...path, 'weight'])} is required.`);
}
if (typeof value.weight !== 'number') {
throw new Error(`${pathString([...path, 'weight'])} must be a number.`);
}
if (value.weight < 0) {
throw new Error(`${pathString([...path, 'weight'])} must be >= 0`);
}
if (value.weight > 1) {
throw new Error(`${pathString([...path, 'weight'])} must be <= 1`);
}
if (value.description !== undefined) {
if (typeof value.description !== 'string') {
throw new Error(`${pathString([...path, 'description'])} must be a string.`);
}
}
}
function assertRiskProfileEvidence(value: unknown, path: string[]): asserts value is RiskProfileEvidence {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.RiskProfileEvidence@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.RiskProfileEvidence@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.generatedAt === undefined) {
throw new Error(`${pathString([...path, 'generatedAt'])} is required.`);
}
if (typeof value.generatedAt !== 'string') {
throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`);
}
if (value.riskScore === undefined) {
throw new Error(`${pathString([...path, 'riskScore'])} is required.`);
}
if (typeof value.riskScore !== 'number') {
throw new Error(`${pathString([...path, 'riskScore'])} must be a number.`);
}
if (value.riskScore < 0) {
throw new Error(`${pathString([...path, 'riskScore'])} must be >= 0`);
}
if (value.riskScore > 100) {
throw new Error(`${pathString([...path, 'riskScore'])} must be <= 100`);
}
if (value.riskLevel === undefined) {
throw new Error(`${pathString([...path, 'riskLevel'])} is required.`);
}
if (!RiskLevelValues.includes(value.riskLevel as RiskLevel)) {
throw new Error(`${pathString([...path, 'riskLevel'])} must be one of ${RiskLevelValues.join(', ')}`);
}
if (value.factors === undefined) {
throw new Error(`${pathString([...path, 'factors'])} is required.`);
}
if (!Array.isArray(value.factors)) {
throw new Error(`${pathString([...path, 'factors'])} must be an array.`);
}
for (let i = 0; i < value.factors.length; i += 1) {
assertRiskFactor(value.factors[i], [...[...path, 'factors'], String(i)]);
}
}
function assertSbomAttestation(value: unknown, path: string[]): asserts value is SbomAttestation {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.SBOMAttestation@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.SBOMAttestation@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.sbomFormat === undefined) {
throw new Error(`${pathString([...path, 'sbomFormat'])} is required.`);
}
if (!SbomFormatValues.includes(value.sbomFormat as SbomFormat)) {
throw new Error(`${pathString([...path, 'sbomFormat'])} must be one of ${SbomFormatValues.join(', ')}`);
}
if (value.sbomDigest === undefined) {
throw new Error(`${pathString([...path, 'sbomDigest'])} is required.`);
}
assertDigestReference(value.sbomDigest, [...path, 'sbomDigest']);
if (value.sbomUri !== undefined) {
if (typeof value.sbomUri !== 'string') {
throw new Error(`${pathString([...path, 'sbomUri'])} must be a string.`);
}
}
if (value.componentCount === undefined) {
throw new Error(`${pathString([...path, 'componentCount'])} is required.`);
}
if (typeof value.componentCount !== 'number') {
throw new Error(`${pathString([...path, 'componentCount'])} must be a number.`);
}
if (value.componentCount < 0) {
throw new Error(`${pathString([...path, 'componentCount'])} must be >= 0`);
}
if (value.packages !== undefined) {
if (!Array.isArray(value.packages)) {
throw new Error(`${pathString([...path, 'packages'])} must be an array.`);
}
for (let i = 0; i < value.packages.length; i += 1) {
assertSbomPackage(value.packages[i], [...[...path, 'packages'], String(i)]);
}
}
}
function assertSbomPackage(value: unknown, path: string[]): asserts value is SbomPackage {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.purl === undefined) {
throw new Error(`${pathString([...path, 'purl'])} is required.`);
}
if (typeof value.purl !== 'string') {
throw new Error(`${pathString([...path, 'purl'])} must be a string.`);
}
if (value.version !== undefined) {
if (typeof value.version !== 'string') {
throw new Error(`${pathString([...path, 'version'])} must be a string.`);
}
}
if (value.licenses !== undefined) {
if (!Array.isArray(value.licenses)) {
throw new Error(`${pathString([...path, 'licenses'])} must be an array.`);
}
if (value.licenses.length < 1) {
throw new Error(`${pathString([...path, 'licenses'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.licenses.length; i += 1) {
if (typeof value.licenses[i] !== 'string') {
throw new Error(`${pathString([...[...path, 'licenses'], String(i)])} must be a string.`);
}
}
}
}
function assertScanFinding(value: unknown, path: string[]): asserts value is ScanFinding {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.id === undefined) {
throw new Error(`${pathString([...path, 'id'])} is required.`);
}
if (typeof value.id !== 'string') {
throw new Error(`${pathString([...path, 'id'])} must be a string.`);
}
if (value.severity === undefined) {
throw new Error(`${pathString([...path, 'severity'])} is required.`);
}
if (!SeverityValues.includes(value.severity as Severity)) {
throw new Error(`${pathString([...path, 'severity'])} must be one of ${SeverityValues.join(', ')}`);
}
if (value.status === undefined) {
throw new Error(`${pathString([...path, 'status'])} is required.`);
}
if (!FindingStatusValues.includes(value.status as FindingStatus)) {
throw new Error(`${pathString([...path, 'status'])} must be one of ${FindingStatusValues.join(', ')}`);
}
if (value.packageName === undefined) {
throw new Error(`${pathString([...path, 'packageName'])} is required.`);
}
if (typeof value.packageName !== 'string') {
throw new Error(`${pathString([...path, 'packageName'])} must be a string.`);
}
if (value.packageVersion !== undefined) {
if (typeof value.packageVersion !== 'string') {
throw new Error(`${pathString([...path, 'packageVersion'])} must be a string.`);
}
}
if (value.cvssScore !== undefined) {
if (typeof value.cvssScore !== 'number') {
throw new Error(`${pathString([...path, 'cvssScore'])} must be a number.`);
}
if (value.cvssScore < 0) {
throw new Error(`${pathString([...path, 'cvssScore'])} must be >= 0`);
}
if (value.cvssScore > 10) {
throw new Error(`${pathString([...path, 'cvssScore'])} must be <= 10`);
}
}
if (value.description !== undefined) {
if (typeof value.description !== 'string') {
throw new Error(`${pathString([...path, 'description'])} must be a string.`);
}
}
if (value.references !== undefined) {
if (!Array.isArray(value.references)) {
throw new Error(`${pathString([...path, 'references'])} must be an array.`);
}
if (value.references.length < 1) {
throw new Error(`${pathString([...path, 'references'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.references.length; i += 1) {
if (typeof value.references[i] !== 'string') {
throw new Error(`${pathString([...[...path, 'references'], String(i)])} must be a string.`);
}
}
}
}
function assertScanResults(value: unknown, path: string[]): asserts value is ScanResults {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.ScanResults@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.ScanResults@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.scannerName === undefined) {
throw new Error(`${pathString([...path, 'scannerName'])} is required.`);
}
if (typeof value.scannerName !== 'string') {
throw new Error(`${pathString([...path, 'scannerName'])} must be a string.`);
}
if (value.scannerVersion === undefined) {
throw new Error(`${pathString([...path, 'scannerVersion'])} is required.`);
}
if (typeof value.scannerVersion !== 'string') {
throw new Error(`${pathString([...path, 'scannerVersion'])} must be a string.`);
}
if (value.generatedAt === undefined) {
throw new Error(`${pathString([...path, 'generatedAt'])} is required.`);
}
if (typeof value.generatedAt !== 'string') {
throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`);
}
if (value.findings === undefined) {
throw new Error(`${pathString([...path, 'findings'])} is required.`);
}
if (!Array.isArray(value.findings)) {
throw new Error(`${pathString([...path, 'findings'])} must be an array.`);
}
for (let i = 0; i < value.findings.length; i += 1) {
assertScanFinding(value.findings[i], [...[...path, 'findings'], String(i)]);
}
}
function assertVexAttestation(value: unknown, path: string[]): asserts value is VexAttestation {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.schemaVersion === undefined) {
throw new Error(`${pathString([...path, 'schemaVersion'])} is required.`);
}
if (typeof value.schemaVersion !== 'string') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must be a string.`);
}
if (value.schemaVersion !== 'StellaOps.VEXAttestation@1') {
throw new Error(`${pathString([...path, 'schemaVersion'])} must equal 'StellaOps.VEXAttestation@1'.`);
}
if (value.subjectDigest === undefined) {
throw new Error(`${pathString([...path, 'subjectDigest'])} is required.`);
}
if (typeof value.subjectDigest !== 'string') {
throw new Error(`${pathString([...path, 'subjectDigest'])} must be a string.`);
}
if (!/^sha256:[A-Fa-f0-9]{64}$/.test(value.subjectDigest)) {
throw new Error(`${pathString([...path, 'subjectDigest'])} does not match expected format.`);
}
if (value.generatedAt === undefined) {
throw new Error(`${pathString([...path, 'generatedAt'])} is required.`);
}
if (typeof value.generatedAt !== 'string') {
throw new Error(`${pathString([...path, 'generatedAt'])} must be a string.`);
}
if (value.statements === undefined) {
throw new Error(`${pathString([...path, 'statements'])} is required.`);
}
if (!Array.isArray(value.statements)) {
throw new Error(`${pathString([...path, 'statements'])} must be an array.`);
}
if (value.statements.length < 1) {
throw new Error(`${pathString([...path, 'statements'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.statements.length; i += 1) {
assertVexStatement(value.statements[i], [...[...path, 'statements'], String(i)]);
}
}
function assertVexStatement(value: unknown, path: string[]): asserts value is VexStatement {
if (!isRecord(value)) {
throw new Error(`${pathString(path)} must be an object.`);
}
if (value.vulnerabilityId === undefined) {
throw new Error(`${pathString([...path, 'vulnerabilityId'])} is required.`);
}
if (typeof value.vulnerabilityId !== 'string') {
throw new Error(`${pathString([...path, 'vulnerabilityId'])} must be a string.`);
}
if (value.status === undefined) {
throw new Error(`${pathString([...path, 'status'])} is required.`);
}
if (!VexStatusValues.includes(value.status as VexStatus)) {
throw new Error(`${pathString([...path, 'status'])} must be one of ${VexStatusValues.join(', ')}`);
}
if (value.timestamp === undefined) {
throw new Error(`${pathString([...path, 'timestamp'])} is required.`);
}
if (typeof value.timestamp !== 'string') {
throw new Error(`${pathString([...path, 'timestamp'])} must be a string.`);
}
if (value.justification !== undefined) {
if (typeof value.justification !== 'string') {
throw new Error(`${pathString([...path, 'justification'])} must be a string.`);
}
}
if (value.impactStatement !== undefined) {
if (typeof value.impactStatement !== 'string') {
throw new Error(`${pathString([...path, 'impactStatement'])} must be a string.`);
}
}
if (value.actionStatement !== undefined) {
if (typeof value.actionStatement !== 'string') {
throw new Error(`${pathString([...path, 'actionStatement'])} must be a string.`);
}
}
if (value.references !== undefined) {
if (!Array.isArray(value.references)) {
throw new Error(`${pathString([...path, 'references'])} must be an array.`);
}
if (value.references.length < 1) {
throw new Error(`${pathString([...path, 'references'])} must contain at least 1 item(s).`);
}
for (let i = 0; i < value.references.length; i += 1) {
if (typeof value.references[i] !== 'string') {
throw new Error(`${pathString([...[...path, 'references'], String(i)])} must be a string.`);
}
}
}
}
export function validateBuildProvenance(value: unknown): BuildProvenance {
assertBuildProvenance(value, []);
return value as BuildProvenance;
}
export function canonicalizeBuildProvenance(value: BuildProvenance): string {
assertBuildProvenance(value, []);
return canonicalStringify(value);
}
export function validateCustomEvidence(value: unknown): CustomEvidence {
assertCustomEvidence(value, []);
return value as CustomEvidence;
}
export function canonicalizeCustomEvidence(value: CustomEvidence): string {
assertCustomEvidence(value, []);
return canonicalStringify(value);
}
export function validatePolicyEvaluation(value: unknown): PolicyEvaluation {
assertPolicyEvaluation(value, []);
return value as PolicyEvaluation;
}
export function canonicalizePolicyEvaluation(value: PolicyEvaluation): string {
assertPolicyEvaluation(value, []);
return canonicalStringify(value);
}
export function validateRiskProfileEvidence(value: unknown): RiskProfileEvidence {
assertRiskProfileEvidence(value, []);
return value as RiskProfileEvidence;
}
export function canonicalizeRiskProfileEvidence(value: RiskProfileEvidence): string {
assertRiskProfileEvidence(value, []);
return canonicalStringify(value);
}
export function validateSbomAttestation(value: unknown): SbomAttestation {
assertSbomAttestation(value, []);
return value as SbomAttestation;
}
export function canonicalizeSbomAttestation(value: SbomAttestation): string {
assertSbomAttestation(value, []);
return canonicalStringify(value);
}
export function validateScanResults(value: unknown): ScanResults {
assertScanResults(value, []);
return value as ScanResults;
}
export function canonicalizeScanResults(value: ScanResults): string {
assertScanResults(value, []);
return canonicalStringify(value);
}
export function validateVexAttestation(value: unknown): VexAttestation {
assertVexAttestation(value, []);
return value as VexAttestation;
}
export function canonicalizeVexAttestation(value: VexAttestation): string {
assertVexAttestation(value, []);
return canonicalStringify(value);
}
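/**
 * Produces deterministic JSON for a validated payload by recursively sorting
 * object keys (see sortValue), so equivalent inputs yield byte-identical output.
 */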
function canonicalStringify(input: unknown): string {
return JSON.stringify(sortValue(input));
}
function sortValue(value: unknown): unknown {
if (Array.isArray(value)) {
return value.map(sortValue);
}
if (isRecord(value)) {
const ordered: Record<string, unknown> = {};
const keys = Object.keys(value).sort();
for (const key of keys) {
ordered[key] = sortValue(value[key]);
}
return ordered;
}
return value;
}

View File

@@ -0,0 +1,237 @@
{
"name": "@stellaops/attestor-types",
"version": "0.1.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@stellaops/attestor-types",
"version": "0.1.0",
"devDependencies": {
"@types/node": "^22.7.4",
"ts-node": "^10.9.2",
"typescript": "^5.6.3"
}
},
"node_modules/@cspotcode/source-map-support": {
"version": "0.8.1",
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
"integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/trace-mapping": "0.3.9"
},
"engines": {
"node": ">=12"
}
},
"node_modules/@jridgewell/resolve-uri": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
"integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6.0.0"
}
},
"node_modules/@jridgewell/sourcemap-codec": {
"version": "1.5.5",
"resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
"integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
"dev": true,
"license": "MIT"
},
"node_modules/@jridgewell/trace-mapping": {
"version": "0.3.9",
"resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
"integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@jridgewell/resolve-uri": "^3.0.3",
"@jridgewell/sourcemap-codec": "^1.4.10"
}
},
"node_modules/@tsconfig/node10": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.11.tgz",
"integrity": "sha512-DcRjDCujK/kCk/cUe8Xz8ZSpm8mS3mNNpta+jGCA6USEDfktlNvm1+IuZ9eTcDbNk41BHwpHHeW+N1lKCz4zOw==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node12": {
"version": "1.0.11",
"resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
"integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node14": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
"integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
"dev": true,
"license": "MIT"
},
"node_modules/@tsconfig/node16": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
"integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
"dev": true,
"license": "MIT"
},
"node_modules/@types/node": {
"version": "22.18.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.18.13.tgz",
"integrity": "sha512-Bo45YKIjnmFtv6I1TuC8AaHBbqXtIo+Om5fE4QiU1Tj8QR/qt+8O3BAtOimG5IFmwaWiPmB3Mv3jtYzBA4Us2A==",
"dev": true,
"license": "MIT",
"peer": true,
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/acorn": {
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
"integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
"dev": true,
"license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/acorn-walk": {
"version": "8.3.4",
"resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.4.tgz",
"integrity": "sha512-ueEepnujpqee2o5aIYnvHU6C0A42MNdsIDeqy5BydrkuC5R1ZuUFnm27EeFJGoEHJQgn3uleRvmTXaJgfXbt4g==",
"dev": true,
"license": "MIT",
"dependencies": {
"acorn": "^8.11.0"
},
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/arg": {
"version": "4.1.3",
"resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
"integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
"dev": true,
"license": "MIT"
},
"node_modules/create-require": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
"integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
"dev": true,
"license": "MIT"
},
"node_modules/diff": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
"dev": true,
"license": "BSD-3-Clause",
"engines": {
"node": ">=0.3.1"
}
},
"node_modules/make-error": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
"integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
"dev": true,
"license": "ISC"
},
"node_modules/ts-node": {
"version": "10.9.2",
"resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
"integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"@cspotcode/source-map-support": "^0.8.0",
"@tsconfig/node10": "^1.0.7",
"@tsconfig/node12": "^1.0.7",
"@tsconfig/node14": "^1.0.0",
"@tsconfig/node16": "^1.0.2",
"acorn": "^8.4.1",
"acorn-walk": "^8.1.1",
"arg": "^4.1.0",
"create-require": "^1.1.0",
"diff": "^4.0.1",
"make-error": "^1.1.1",
"v8-compile-cache-lib": "^3.0.1",
"yn": "3.1.1"
},
"bin": {
"ts-node": "dist/bin.js",
"ts-node-cwd": "dist/bin-cwd.js",
"ts-node-esm": "dist/bin-esm.js",
"ts-node-script": "dist/bin-script.js",
"ts-node-transpile-only": "dist/bin-transpile.js",
"ts-script": "dist/bin-script-deprecated.js"
},
"peerDependencies": {
"@swc/core": ">=1.2.50",
"@swc/wasm": ">=1.2.50",
"@types/node": "*",
"typescript": ">=2.7"
},
"peerDependenciesMeta": {
"@swc/core": {
"optional": true
},
"@swc/wasm": {
"optional": true
}
}
},
"node_modules/typescript": {
"version": "5.9.3",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
"integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
"dev": true,
"license": "Apache-2.0",
"peer": true,
"bin": {
"tsc": "bin/tsc",
"tsserver": "bin/tsserver"
},
"engines": {
"node": ">=14.17"
}
},
"node_modules/undici-types": {
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/v8-compile-cache-lib": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
"integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
"dev": true,
"license": "MIT"
},
"node_modules/yn": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
"integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=6"
}
}
}
}

View File

@@ -0,0 +1,16 @@
{
"name": "@stellaops/attestor-types",
"version": "0.1.0",
"private": true,
"type": "module",
"scripts": {
"build": "tsc --project tsconfig.json",
"clean": "rm -rf dist",
"test": "npm run build && node --test dist/index.test.js"
},
"devDependencies": {
"@types/node": "^22.7.4",
"ts-node": "^10.9.2",
"typescript": "^5.6.3"
}
}

View File

@@ -0,0 +1,19 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ES2022",
"moduleResolution": "node",
"outDir": "dist",
"declaration": true,
"declarationMap": false,
"sourceMap": false,
"strict": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"skipLibCheck": true
},
"include": [
"index.ts",
"index.test.ts"
]
}

View File

@@ -0,0 +1,23 @@
# StellaOps Attestation Golden Samples
This directory contains deterministic JSON fixtures used across unit tests, regression checks, and documentation snippets for the Attestor predicate family.

| File | Predicate | Notes |
| --- | --- | --- |
| `build-provenance.v1.json` | `StellaOps.BuildProvenance@1` | Mirrors SLSA provenance fields with StellaOps metadata extensions. |
| `sbom-attestation.v1.json` | `StellaOps.SBOMAttestation@1` | Links to CycloneDX 1.6 BOM data and dependency relationships. |
| `scan-results.v1.json` | `StellaOps.ScanResults@1` | Captures multi-type findings (vulnerability, policy) with lattice scores. |
| `policy-evaluation.v1.json` | `StellaOps.PolicyEvaluation@1` | Documents deny verdict trace with digests for inputs and policies. |
| `vex-attestation.v1.json` | `StellaOps.VEXAttestation@1` | Provides OpenVEX-compatible statements for the Scanner image. |
| `risk-profile-evidence.v1.json` | `StellaOps.RiskProfileEvidence@1` | Compresses risk matrix outputs from the Risk Engine. |
| `custom-evidence.v1.json` | `StellaOps.CustomEvidence@1` | Shows a Zastava runtime hardening checklist embedded as custom evidence. |

All fixtures share the same `subject` digest (`ghcr.io/stellaops/scanner@sha256:d5f5…`) so they can be chained together inside multi-artefact verification tests. Keys are alphabetically ordered in every object to keep canonical JSON generation straightforward.
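
The ordering guarantee comes from the canonical serialiser in the attestor-types validator module shown earlier in this change. A minimal sketch, assuming the relative import path and using illustrative field values:

```ts
// Sketch only: the import path assumes the package's compiled index sits next
// to this script, and the field values are illustrative.
import { canonicalizeCustomEvidence, validateCustomEvidence } from './index.js';

const evidence = validateCustomEvidence({
  kind: 'runtime-hardening-checklist',
  generatedAt: '2025-10-30T04:05:19Z',
  subjectDigest:
    'sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72',
  schemaVersion: 'StellaOps.CustomEvidence@1',
  properties: [{ value: 'pass', key: 'NSA-CISA-Kubernetes-1.2.1' }],
});

// Keys serialise in sorted order (generatedAt, kind, properties, schemaVersion,
// subjectDigest), so the output is byte-stable regardless of insertion order.
console.log(canonicalizeCustomEvidence(evidence));
```
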
When adding or updating fixtures:
1. Preserve field ordering and avoid timestamps without explicit values.
2. Update `AttestationGoldenSamplesTests.cs` to assert any new invariants.
3. Reference the fixture in the relevant module documentation so downstream users can discover it.

Run `npm run docs:attestor:validate` from the repository root to confirm fixtures, schemas, and generated SDKs remain in lock-step.
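
For ad-hoc checks outside that script, the generated TypeScript helpers can be driven directly. The sketch below is illustrative rather than project tooling: it validates and canonicalises an in-memory `StellaOps.BuildProvenance@1` payload shaped per the schema in this change, and the import path and field values are assumptions.

```ts
// Sketch only: import path and payload values are illustrative assumptions.
import {
  canonicalizeBuildProvenance,
  validateBuildProvenance,
} from './index.js';

const payload = validateBuildProvenance({
  schemaVersion: 'StellaOps.BuildProvenance@1',
  buildType: 'https://stella-ops.org/build/docker-image@v1',
  builder: { id: 'stellaops://pipelines/scanner-ci', version: '2025.10' },
  materials: [
    {
      uri: 'git+https://git.stella-ops.org/stellaops/scanner.git#src',
      digests: [
        {
          algorithm: 'sha256',
          value: '61e7a7d1d0a64e788d4a32294e7255f2667ef45ef8d358b3f6262e2de3b2c13d',
        },
      ],
    },
  ],
  metadata: {
    buildStartedOn: '2025-10-29T18:19:08Z',
    buildFinishedOn: '2025-10-29T18:22:14Z',
    reproducible: false,
  },
});

// validateBuildProvenance throws with a path-qualified message when the payload
// drifts from the schema; canonicalizeBuildProvenance re-validates and emits
// key-sorted JSON suitable for hashing or fixture diffs.
console.log(canonicalizeBuildProvenance(payload));
```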

View File

@@ -0,0 +1,71 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"buildDefinition": {
"buildType": "https://stella-ops.org/build/docker-image@v1",
"builder": "stellaops://pipelines/scanner-ci",
"environment": {
"image": "ghcr.io/stellaops/buildkit:2025.10",
"platform": "linux/amd64",
"workerPool": "us-west-2/buildfarm/a"
},
"source": {
"configDigest": {
"sha256": "f135b1eb59b38c46b7c9f95abfa23d8739d9a71717f7ceebf2e0fdf75c9b8d63"
},
"entryPoint": "scanner/Dockerfile",
"uri": "git+https://git.stella-ops.org/stellaops/scanner.git@refs/heads/main"
}
},
"materials": [
{
"digest": {
"sha256": "61e7a7d1d0a64e788d4a32294e7255f2667ef45ef8d358b3f6262e2de3b2c13d"
},
"uri": "git+https://git.stella-ops.org/stellaops/scanner.git#src"
},
{
"digest": {
"sha256": "3f2dc5d957bbf9570da0e7af0fde61ebc9e32b0ad156bea6f3cfa4d94cda740a"
},
"uri": "oci://ghcr.io/stellaops/base-images/builder:2025.09"
}
],
"metadata": {
"builtOn": "2025-10-29T18:22:14Z",
"durationSeconds": 186,
"invocationId": "urn:uuid:7ac7b9b4-0f6a-4ad1-9d42-9e0df7bd4a94",
"licenseBundle": "stellaops://licenses/enterprise@2025-10"
},
"runDetails": {
"arguments": [
"buildx",
"bake",
"--file",
"scanner/docker-bake.hcl",
"--set",
"sbom.mode=strict"
],
"builderImageDigest": "sha256:6ee70a4014258f9c0a40e10b5f5a4b1c6497e5d6b9cd848771c3d79f7f0d91da",
"exitCode": 0,
"logs": [
{
"digest": {
"sha256": "4b4fd1f46633f475aa1ed58ec0d95af0ed3e4015d9d41fce7883c0e650f730fb"
},
"uri": "s3://attestor-logs/scanner-ci/2025-10-29/build.log"
}
]
}
},
"predicateType": "StellaOps.BuildProvenance@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,48 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"contentType": "application/json",
"description": "Runtime hardening checklist exported from Kubernetes admission controller.",
"payload": {
"controlResults": [
{
"control": "NSA-CISA-Kubernetes-1.2.1",
"evidence": "pod security baseline profile enforced",
"status": "pass"
},
{
"control": "NSA-CISA-Kubernetes-1.3.4",
"evidence": "imagePullPolicy set to Always",
"status": "pass"
},
{
"control": "NSA-CISA-Kubernetes-1.4.2",
"evidence": "no container runs as privileged",
"status": "pass"
},
{
"control": "NSA-CISA-Kubernetes-1.5.1",
"evidence": "hostPath mounts limited to /var/run/secrets",
"status": "warn"
}
],
"generator": "stellaops://zastava/observer@2025.10.0",
"scopedNamespace": "scanner-prod"
},
"schema": "https://stella-ops.org/custom/nsacisa-runtime-checklist@v1",
"tags": [
"runtime-hardening",
"zastava"
]
},
"predicateType": "StellaOps.CustomEvidence@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,69 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"decision": "deny",
"explain": {
"failingAssertions": [
{
"message": "critical vulnerability CVE-2025-30104 lacks mitigating VEX statement",
"path": "controls/critical-vulns.rego#L42"
},
{
"message": "policy `runtime-tls` requires TLS 1.3 or higher",
"path": "controls/runtime-tls.rego#L18"
}
],
"inputs": {
"policyDigest": "sha256:73acd8cb50ea52b9b98534fada689755fbdb0a12f8bd4f6069276e7ce21a0bd8",
"sbomDigest": "sha256:7c8d89e9c5f7ca1be3f75653d3cb4dd511ee1f65c1b9bc606cd19c578b57e9c2",
"vexDigest": "sha256:72b3ead4c7de0f65a3be9a9d73b7bc2840f1494de3d1666d4010e23bb1b82768"
},
"trace": [
{
"decision": "deny",
"policy": "controls/critical-vulns.rego",
"rule": "require_vex_or_patch"
},
{
"decision": "warn",
"policy": "controls/license.rego",
"rule": "allow_mit_or_apache"
}
]
},
"policyDigest": "sha256:73acd8cb50ea52b9b98534fada689755fbdb0a12f8bd4f6069276e7ce21a0bd8",
"policyVersion": "2025.10.0",
"rules": [
{
"id": "controls/critical-vulns",
"outcome": "deny",
"type": "rego"
},
{
"id": "controls/runtime-tls",
"outcome": "deny",
"type": "rego"
},
{
"id": "controls/license",
"outcome": "warn",
"type": "rego"
}
],
"summary": {
"decision": "deny",
"evaluatedAt": "2025-10-29T18:24:10Z",
"inputDigest": "sha256:8f9047f8cb22dafeb8fff2e8e75ef4e93a005f619210d4c4282c86aa2addc81e"
}
},
"predicateType": "StellaOps.PolicyEvaluation@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,50 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"baseline": {
"profile": "stellaops://risk/profiles/container-runtime@2025.10",
"source": "RiskEngine 4.2.0",
"timestamp": "2025-10-29T18:24:12Z"
},
"riskMatrix": {
"categories": [
{
"dimension": "attack_surface",
"level": "medium",
"rationale": "External TCP services restricted to TLS; ephemeral build pods."
},
{
"dimension": "supply_chain",
"level": "low",
"rationale": "All dependencies pinned with SBOM attestations."
},
{
"dimension": "runtime_controls",
"level": "medium",
"rationale": "Admission policy enforced; runtime drift monitoring pending rollout."
}
]
},
"scoring": {
"method": "stellaops://risk/models/v2",
"score": 682,
"tier": "moderate"
},
"vectors": {
"cisaKevCoverage": 1.0,
"exploitMaturity": "poc_available",
"patchLatencyDays": 2,
"unknownServiceRatio": 0.08
}
},
"predicateType": "StellaOps.RiskProfileEvidence@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,80 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"document": {
"bomRef": "urn:uuid:0cda40b3-4e89-4a89-8d94-65011fac1cb1",
"created": "2025-10-29T18:22:33Z",
"digest": {
"sha256": "7c8d89e9c5f7ca1be3f75653d3cb4dd511ee1f65c1b9bc606cd19c578b57e9c2"
},
"name": "scanner-webservice",
"version": "2025.10.29+build.186"
},
"packages": [
{
"bomRef": "pkg:docker/alpine@3.19.2?arch=amd64",
"licenses": [
{
"expression": "MIT"
}
],
"name": "alpine",
"purl": "pkg:alpine/alpine-base@3.19.2?arch=amd64",
"version": "3.19.2"
},
{
"bomRef": "pkg:nuget/MongoDB.Driver@2.23.0",
"licenses": [
{
"expression": "Apache-2.0"
}
],
"name": "MongoDB.Driver",
"purl": "pkg:nuget/MongoDB.Driver@2.23.0",
"version": "2.23.0"
},
{
"bomRef": "pkg:nuget/Serilog@3.1.0",
"licenses": [
{
"expression": "Apache-2.0"
}
],
"name": "Serilog",
"purl": "pkg:nuget/Serilog@3.1.0",
"version": "3.1.0"
}
],
"relationships": [
{
"source": "pkg:docker/alpine@3.19.2?arch=amd64",
"target": "pkg:nuget/MongoDB.Driver@2.23.0",
"type": "contains"
},
{
"source": "pkg:docker/alpine@3.19.2?arch=amd64",
"target": "pkg:nuget/Serilog@3.1.0",
"type": "contains"
}
],
"sbomFormat": "CycloneDX",
"sbomSpecVersion": "1.6.0",
"summary": {
"componentCount": 143,
"dependencyEdges": 212,
"metadataDigest": {
"sha256": "ba7a324b42d9da408d6c03119f6355bf25cb9f7e1a23355b9ab29cbe588b6e9f"
}
}
},
"predicateType": "StellaOps.SBOMAttestation@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,102 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"artifacts": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner:2025.10.29",
"sbomRef": "urn:uuid:0cda40b3-4e89-4a89-8d94-65011fac1cb1"
}
],
"environment": {
"platform": "linux/amd64",
"runtime": "stellaops/scanner-worker@2025.10",
"tenant": "acme-industries"
},
"findings": [
{
"analysis": {
"evidence": "library reachable from entry trace",
"exploitability": "high",
"normalizedScore": 9.1
},
"id": "CVE-2025-30104",
"package": {
"name": "openssl",
"purl": "pkg:apk/alpine/openssl@3.2.2-r2",
"version": "3.2.2-r2"
},
"references": [
"https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2025-30104"
],
"severity": "critical",
"status": "affected",
"type": "vulnerability"
},
{
"analysis": {
"evidence": "policy requires minimum TLS 1.3 support",
"exploitability": "medium",
"normalizedScore": 6.3
},
"id": "STELLAOPS-POLICY-2025-0007",
"package": {
"name": "runtime-config",
"purl": "pkg:generic/config/runtime@2025.10.29",
"version": "2025.10.29"
},
"references": [
"stellaops://policy/controls/tls-min-version"
],
"severity": "medium",
"status": "affected",
"type": "policy"
},
{
"analysis": {
"evidence": "package unused according to entry trace",
"exploitability": "none",
"normalizedScore": 0.0
},
"id": "CVE-2023-99999",
"package": {
"name": "curl",
"purl": "pkg:apk/alpine/curl@8.0.1-r0",
"version": "8.0.1-r0"
},
"references": [
"https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2023-99999"
],
"severity": "low",
"status": "not_affected",
"type": "vulnerability"
}
],
"scanner": {
"name": "StellaOps.Scanner",
"rulesCommit": "a6dfc9b1f4e64d5e82b6f54b0f2dd1e8b13d1f8a",
"version": "2025.10.0"
},
"summary": {
"affected": 2,
"critical": 1,
"high": 0,
"info": 7,
"low": 1,
"medium": 1,
"timestamp": "2025-10-29T18:24:07Z"
}
},
"predicateType": "StellaOps.ScanResults@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,67 @@
{
"_type": "https://in-toto.io/Statement/v1",
"predicate": {
"context": {
"generator": "stellaops://vex/excititor@2025.10",
"issuedAt": "2025-10-30T04:05:19Z",
"supplier": "StellaOps Security Research"
},
"statements": [
{
"analysis": {
"detail": "Package not reachable from configured entry trace.",
"impact": "not_affected_usage_scope",
"lastReviewed": "2025-10-30T03:50:02Z"
},
"expires": "2026-01-30T00:00:00Z",
"justification": "vex:component_not_present",
"product": {
"cpe": "cpe:2.3:a:stellaops:scanner_webservice:2025.10.29:*:*:*:*:*:*:*",
"purl": "pkg:docker/ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"status": "not_affected",
"subcomponents": [
{
"path": "/app/bin/StellaOps.Scanner.dll",
"ref": "pkg:nuget/Serilog@3.1.0"
}
],
"timestamp": "2025-10-30T04:05:19Z",
"vulnerability": "CVE-2025-21901"
},
{
"analysis": {
"detail": "Patched in base image digest specified in SBOM.",
"impact": "no_known_exploit",
"lastReviewed": "2025-10-29T23:11:44Z"
},
"expires": "2025-12-15T00:00:00Z",
"justification": "vex:fix_available",
"product": {
"cpe": "cpe:2.3:a:stellaops:scanner_webservice:2025.10.29:*:*:*:*:*:*:*",
"purl": "pkg:docker/ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"status": "affected",
"subcomponents": [
{
"path": "/usr/lib/libssl.so.3",
"ref": "pkg:apk/alpine/openssl@3.2.2-r2"
}
],
"timestamp": "2025-10-29T23:09:31Z",
"vulnerability": "CVE-2025-30104"
}
],
"vexVersion": "OpenVEX-1.0.0"
},
"predicateType": "StellaOps.VEXAttestation@1",
"predicateVersion": "1.0.0",
"subject": [
{
"digest": {
"sha256": "d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
},
"name": "ghcr.io/stellaops/scanner@sha256:d5f5e54d1e1a4c3c7b18961ea7cadb88ec0a93a9f2f40f0e823d9184c83e4d72"
}
]
}

View File

@@ -0,0 +1,372 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://schemas.stella-ops.org/attestations/common/v1",
"title": "StellaOps Attestation Common Definitions v1",
"type": "object",
"description": "Shared schema components reused across StellaOps attestation predicates.",
"$defs": {
"schemaVersion": {
"type": "string",
"pattern": "^1\\.0\\.\\d+$",
"description": "Semantic version identifier for predicate schema revisions. Initial release is 1.0.x."
},
"digestSet": {
"type": "object",
"description": "Map of hashing algorithm to lowercase hexadecimal digest.",
"minProperties": 1,
"maxProperties": 4,
"patternProperties": {
"^[A-Za-z0-9]+$": {
"type": "string",
"pattern": "^[a-f0-9]{32,128}$"
}
},
"additionalProperties": false
},
"subject": {
"type": "object",
"additionalProperties": false,
"required": [
"subjectKind",
"digest"
],
"properties": {
"subjectKind": {
"type": "string",
"enum": [
"container-image",
"sbom",
"scan-report",
"policy-report",
"vex-statement",
"risk-profile",
"artifact"
]
},
"name": {
"type": "string",
"minLength": 1,
"maxLength": 512
},
"uri": {
"type": "string",
"format": "uri"
},
"digest": {
"$ref": "#/$defs/digestSet"
},
"annotations": {
"type": "object",
"additionalProperties": {
"type": "string",
"maxLength": 256
}
},
"imageDigest": {
"type": "string",
"pattern": "^sha256:[a-f0-9]{64}$"
},
"mediaType": {
"type": "string",
"maxLength": 128
},
"sizeBytes": {
"type": "integer",
"minimum": 0
}
}
},
"subjectList": {
"type": "array",
"minItems": 1,
"uniqueItems": true,
"items": {
"$ref": "#/$defs/subject"
}
},
"issuer": {
"type": "object",
"description": "Identity metadata describing the signer of the attestation predicate.",
"additionalProperties": false,
"required": [
"issuerType",
"id",
"signingKey"
],
"properties": {
"issuerType": {
"type": "string",
"enum": [
"service",
"user",
"automation",
"device"
]
},
"id": {
"type": "string",
"minLength": 4,
"maxLength": 256
},
"tenantId": {
"type": "string",
"minLength": 1,
"maxLength": 128
},
"displayName": {
"type": "string",
"maxLength": 256
},
"email": {
"type": "string",
"format": "email"
},
"workload": {
"type": "object",
"additionalProperties": false,
"required": [
"service"
],
"properties": {
"service": {
"type": "string",
"maxLength": 128
},
"cluster": {
"type": "string",
"maxLength": 128
},
"namespace": {
"type": "string",
"maxLength": 128
},
"region": {
"type": "string",
"maxLength": 64
}
}
},
"signingKey": {
"type": "object",
"additionalProperties": false,
"required": [
"keyId",
"mode",
"algorithm"
],
"properties": {
"keyId": {
"type": "string",
"maxLength": 256
},
"mode": {
"type": "string",
"enum": [
"keyless",
"kms",
"hsm",
"fido2",
"offline"
]
},
"algorithm": {
"type": "string",
"maxLength": 64
},
"issuer": {
"type": "string",
"maxLength": 256
},
"certificateChain": {
"type": "array",
"maxItems": 5,
"items": {
"type": "string",
"minLength": 1
}
},
"proof": {
"type": "object",
"additionalProperties": false,
"properties": {
"fulcioIdentity": {
"type": "string",
"maxLength": 256
},
"hardwareClass": {
"type": "string",
"maxLength": 128
}
}
}
}
}
}
},
"material": {
"type": "object",
"additionalProperties": false,
"required": [
"uri"
],
"properties": {
"uri": {
"type": "string",
"minLength": 1,
"maxLength": 512
},
"digest": {
"$ref": "#/$defs/digestSet"
},
"mediaType": {
"type": "string",
"maxLength": 128
},
"role": {
"type": "string",
"maxLength": 64
},
"annotations": {
"type": "object",
"additionalProperties": {
"type": "string",
"maxLength": 128
}
}
}
},
"transparencyLog": {
"type": "object",
"additionalProperties": false,
"required": [
"logId",
"logUrl",
"uuid"
],
"properties": {
"logId": {
"type": "string",
"maxLength": 128
},
"logUrl": {
"type": "string",
"format": "uri"
},
"uuid": {
"type": "string",
"maxLength": 128
},
"index": {
"type": "integer",
"minimum": 0
},
"checkpoint": {
"type": "object",
"additionalProperties": false,
"required": [
"origin",
"size",
"rootHash",
"timestamp"
],
"properties": {
"origin": {
"type": "string",
"maxLength": 128
},
"size": {
"type": "integer",
"minimum": 0
},
"rootHash": {
"type": "string",
"pattern": "^[A-Za-z0-9\\+/=]{16,128}$"
},
"timestamp": {
"type": "string",
"format": "date-time"
}
}
},
"witnessed": {
"type": "boolean"
}
}
},
"transparencyLogList": {
"type": "array",
"items": {
"$ref": "#/$defs/transparencyLog"
}
},
"policyContext": {
"type": "object",
"additionalProperties": false,
"properties": {
"policyId": {
"type": "string",
"maxLength": 128
},
"policyVersion": {
"type": "string",
"maxLength": 32
},
"revisionDigest": {
"$ref": "#/$defs/digestSet"
},
"mode": {
"type": "string",
"enum": [
"enforce",
"dry-run"
]
}
}
},
"vexStatus": {
"type": "string",
"enum": [
"not_affected",
"affected",
"fixed",
"under_investigation"
]
},
"severity": {
"type": "string",
"enum": [
"critical",
"high",
"medium",
"low",
"informational"
]
},
"explainReference": {
"type": "object",
"additionalProperties": false,
"required": [
"id",
"type"
],
"properties": {
"id": {
"type": "string",
"maxLength": 128
},
"type": {
"type": "string",
"enum": [
"rule",
"step",
"binding"
]
},
"message": {
"type": "string",
"maxLength": 2048
}
}
}
}
}

View File

@@ -0,0 +1,160 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-build-provenance.v1.json",
"title": "Build provenance evidence capturing builder inputs and outputs.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"buildType",
"builder",
"materials",
"metadata"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.BuildProvenance@1",
"description": "Schema version identifier."
},
"buildType": {
"type": "string",
"description": "Build type or workflow identifier."
},
"builder": {
"$ref": "#/$defs/BuilderIdentity",
"description": "Builder identity metadata."
},
"materials": {
"type": "array",
"items": {
"$ref": "#/$defs/MaterialReference"
},
"minItems": 1,
"description": "Materials consumed during the build."
},
"metadata": {
"$ref": "#/$defs/BuildMetadata",
"description": "Build metadata information."
},
"environment": {
"$ref": "#/$defs/EnvironmentMetadata",
"description": "Optional environment details for the build context."
}
},
"$defs": {
"BuilderIdentity": {
"type": "object",
"additionalProperties": false,
"description": "Identifies the builder that produced the artifact.",
"required": [
"id"
],
"properties": {
"id": {
"type": "string",
"description": "Unique builder identity (URI or name)."
},
"version": {
"type": "string",
"description": "Builder version identifier."
},
"platform": {
"type": "string",
"description": "Execution platform for the build."
}
}
},
"DigestReference": {
"type": "object",
"additionalProperties": false,
"description": "Normalized digest entry containing algorithm and value.",
"required": [
"algorithm",
"value"
],
"properties": {
"algorithm": {
"type": "string",
"description": "Digest algorithm identifier (e.g., sha256)."
},
"value": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{64}$",
"description": "Hex-encoded digest value."
}
}
},
"MaterialReference": {
"type": "object",
"additionalProperties": false,
"description": "Inputs used during build or analysis.",
"required": [
"uri",
"digests"
],
"properties": {
"uri": {
"type": "string",
"description": "Material location or identifier."
},
"digests": {
"type": "array",
"items": {
"$ref": "#/$defs/DigestReference"
},
"minItems": 1,
"description": "Digests associated with the material."
},
"note": {
"type": "string",
"description": "Optional annotation about the material."
}
}
},
"BuildMetadata": {
"type": "object",
"additionalProperties": false,
"description": "Metadata describing build timing and reproducibility.",
"required": [
"buildStartedOn",
"buildFinishedOn"
],
"properties": {
"buildStartedOn": {
"type": "string",
"format": "date-time",
"description": "UTC timestamp for build start."
},
"buildFinishedOn": {
"type": "string",
"format": "date-time",
"description": "UTC timestamp for build completion."
},
"reproducible": {
"type": "boolean",
"description": "Indicates whether the build is reproducible."
},
"buildInvocationId": {
"type": "string",
"description": "Unique identifier for the build invocation."
}
}
},
"EnvironmentMetadata": {
"type": "object",
"additionalProperties": false,
"description": "Optional environment metadata for build context.",
"properties": {
"platform": {
"type": "string",
"description": "Execution platform or runtime."
},
"imageDigest": {
"$ref": "#/$defs/DigestReference",
"description": "Digest for the environment image."
}
}
}
}
}

View File

@@ -0,0 +1,63 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-custom-evidence.v1.json",
"title": "Generic evidence payload for bespoke attestations.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"kind",
"generatedAt"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.CustomEvidence@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest this evidence references."
},
"kind": {
"type": "string",
"description": "Custom evidence kind identifier."
},
"generatedAt": {
"type": "string",
"format": "date-time",
"description": "Timestamp when the evidence was generated."
},
"properties": {
"type": "array",
"items": {
"$ref": "#/$defs/CustomProperty"
},
"minItems": 0,
"description": "Optional key/value properties for additional context."
}
},
"$defs": {
"CustomProperty": {
"type": "object",
"additionalProperties": false,
"description": "Key/value entry for custom evidence.",
"required": [
"key",
"value"
],
"properties": {
"key": {
"type": "string",
"description": "Property key."
},
"value": {
"type": "string",
"description": "Property value serialized as string."
}
}
}
}
}

View File

@@ -0,0 +1,100 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-policy-evaluation.v1.json",
"title": "Policy evaluation outcome for an artifact.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"policyVersion",
"evaluatedAt",
"outcome",
"decisions"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.PolicyEvaluation@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest that was evaluated."
},
"policyVersion": {
"type": "string",
"description": "Policy bundle version applied."
},
"evaluatedAt": {
"type": "string",
"format": "date-time",
"description": "Timestamp when policy evaluation was executed."
},
"outcome": {
"$ref": "#/$defs/PolicyOutcome",
"description": "Overall evaluation outcome."
},
"decisions": {
"type": "array",
"items": {
"$ref": "#/$defs/PolicyDecision"
},
"minItems": 0,
"description": "Detailed rule-level decisions."
}
},
"$defs": {
"PolicyOutcome": {
"type": "string",
"description": "Policy evaluation outcome values.",
"enum": [
"pass",
"fail",
"waived"
]
},
"PolicyEffect": {
"type": "string",
"description": "Policy rule effect values.",
"enum": [
"allow",
"deny",
"warn"
]
},
"PolicyDecision": {
"type": "object",
"additionalProperties": false,
"description": "Outcome of an individual policy rule evaluation.",
"required": [
"policyId",
"ruleId",
"effect"
],
"properties": {
"policyId": {
"type": "string",
"description": "Policy identifier."
},
"ruleId": {
"type": "string",
"description": "Specific rule identifier."
},
"effect": {
"$ref": "#/$defs/PolicyEffect",
"description": "Resulting effect of the rule."
},
"reason": {
"type": "string",
"description": "Explanation for the effect."
},
"remediation": {
"type": "string",
"description": "Suggested remediation action."
}
}
}
}
}

View File

@@ -0,0 +1,88 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-risk-profile.v1.json",
"title": "Risk scoring evidence summarising exposure for an artifact.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"generatedAt",
"riskScore",
"riskLevel",
"factors"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.RiskProfileEvidence@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest that the risk profile describes."
},
"generatedAt": {
"type": "string",
"format": "date-time",
"description": "Timestamp when scoring was performed."
},
"riskScore": {
"type": "number",
"minimum": 0,
"maximum": 100,
"description": "Normalized risk score between 0 and 100."
},
"riskLevel": {
"$ref": "#/$defs/RiskLevel",
"description": "Risk level classification."
},
"factors": {
"type": "array",
"items": {
"$ref": "#/$defs/RiskFactor"
},
"minItems": 0,
"description": "Factors contributing to the total risk."
}
},
"$defs": {
"RiskLevel": {
"type": "string",
"description": "Risk level indicators.",
"enum": [
"critical",
"high",
"medium",
"low",
"informational"
]
},
"RiskFactor": {
"type": "object",
"additionalProperties": false,
"description": "Factor contributing to risk calculation.",
"required": [
"name",
"weight"
],
"properties": {
"name": {
"type": "string",
"description": "Risk factor name."
},
"weight": {
"type": "number",
"minimum": 0,
"maximum": 1,
"description": "Relative weight between 0 and 1."
},
"description": {
"type": "string",
"description": "Additional context for the factor."
}
}
}
}
}
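A sketch of a conforming risk profile (score, level, and factor names are illustrative; the digest is the empty-input SHA-256 placeholder):
{
  "schemaVersion": "StellaOps.RiskProfileEvidence@1",
  "subjectDigest": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "generatedAt": "2025-11-01T12:00:00Z",
  "riskScore": 42.5,
  "riskLevel": "medium",
  "factors": [
    { "name": "internet-exposed", "weight": 0.6, "description": "Service reachable from untrusted networks." },
    { "name": "known-exploit-available", "weight": 0.4 }
  ]
}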

View File

@@ -0,0 +1,107 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-sbom-attestation.v1.json",
"title": "SBOM attestation linking an SBOM document to an artifact.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"sbomFormat",
"sbomDigest",
"componentCount"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.SBOMAttestation@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest referenced by the SBOM."
},
"sbomFormat": {
"$ref": "#/$defs/SbomFormat",
"description": "SBOM format identifier."
},
"sbomDigest": {
"$ref": "#/$defs/DigestReference",
"description": "Digest of the SBOM document."
},
"sbomUri": {
"type": "string",
"description": "Location where the SBOM can be retrieved."
},
"componentCount": {
"type": "integer",
"minimum": 0,
"description": "Number of components described by the SBOM."
},
"packages": {
"type": "array",
"items": {
"$ref": "#/$defs/SbomPackage"
},
"minItems": 0,
"description": "Optional package listing for quick lookups."
}
},
"$defs": {
"SbomFormat": {
"type": "string",
"description": "Supported SBOM formats.",
"enum": [
"CycloneDX-1.6",
"SBOM-3.0.0"
]
},
"DigestReference": {
"type": "object",
"additionalProperties": false,
"description": "Normalized digest entry containing algorithm and value.",
"required": [
"algorithm",
"value"
],
"properties": {
"algorithm": {
"type": "string",
"description": "Digest algorithm identifier (e.g., sha256)."
},
"value": {
"type": "string",
"pattern": "^[A-Fa-f0-9]{64}$",
"description": "Hex-encoded digest value."
}
}
},
"SbomPackage": {
"type": "object",
"additionalProperties": false,
"description": "SBOM package entry.",
"required": [
"purl"
],
"properties": {
"purl": {
"type": "string",
"description": "Package URL reference."
},
"version": {
"type": "string",
"description": "Resolved package version."
},
"licenses": {
"type": "array",
"items": {
"type": "string"
},
"minItems": 1,
"description": "Associated license identifiers."
}
}
}
}
}
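A sketch of a conforming SBOM attestation (the URI and package entry are illustrative; both digests are the empty-input SHA-256 placeholder):
{
  "schemaVersion": "StellaOps.SBOMAttestation@1",
  "subjectDigest": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "sbomFormat": "CycloneDX-1.6",
  "sbomDigest": {
    "algorithm": "sha256",
    "value": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
  },
  "sbomUri": "https://artifacts.example.internal/sboms/app.cdx.json",
  "componentCount": 1,
  "packages": [
    { "purl": "pkg:npm/lodash@4.17.21", "version": "4.17.21", "licenses": ["MIT"] }
  ]
}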

View File

@@ -0,0 +1,122 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-scan-results.v1.json",
"title": "Scanner findings for an artifact at a point in time.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"scannerName",
"scannerVersion",
"generatedAt",
"findings"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.ScanResults@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest that was scanned."
},
"scannerName": {
"type": "string",
"description": "Name of the scanner that produced the findings."
},
"scannerVersion": {
"type": "string",
"description": "Scanner version string."
},
"generatedAt": {
"type": "string",
"format": "date-time",
"description": "Timestamp when the scan results were generated."
},
"findings": {
"type": "array",
"items": {
"$ref": "#/$defs/ScanFinding"
},
"minItems": 0,
"description": "List of findings captured during the scan."
}
},
"$defs": {
"Severity": {
"type": "string",
"description": "Finding severity scale.",
"enum": [
"critical",
"high",
"medium",
"low",
"info"
]
},
"FindingStatus": {
"type": "string",
"description": "Finding lifecycle status.",
"enum": [
"detected",
"confirmed",
"fixed",
"not_affected"
]
},
"ScanFinding": {
"type": "object",
"additionalProperties": false,
"description": "Individual finding from a scan.",
"required": [
"id",
"severity",
"status",
"packageName"
],
"properties": {
"id": {
"type": "string",
"description": "Scanner-issued identifier."
},
"severity": {
"$ref": "#/$defs/Severity",
"description": "Severity classification."
},
"status": {
"$ref": "#/$defs/FindingStatus",
"description": "Lifecycle state of the finding."
},
"packageName": {
"type": "string",
"description": "Affected package name."
},
"packageVersion": {
"type": "string",
"description": "Affected package version."
},
"cvssScore": {
"type": "number",
"minimum": 0,
"maximum": 10,
"description": "CVSS base score if available."
},
"description": {
"type": "string",
"description": "Human-readable description of the finding."
},
"references": {
"type": "array",
"items": {
"type": "string"
},
"minItems": 1,
"description": "Reference links or advisory identifiers."
}
}
}
}
}
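A sketch of a conforming scan result (scanner name and version are illustrative; CVE-2021-23337 is used only as a familiar lodash example):
{
  "schemaVersion": "StellaOps.ScanResults@1",
  "subjectDigest": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "scannerName": "example-scanner",
  "scannerVersion": "1.2.3",
  "generatedAt": "2025-11-01T12:00:00Z",
  "findings": [
    {
      "id": "CVE-2021-23337",
      "severity": "high",
      "status": "detected",
      "packageName": "lodash",
      "packageVersion": "4.17.20",
      "references": ["https://nvd.nist.gov/vuln/detail/CVE-2021-23337"]
    }
  ]
}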

View File

@@ -0,0 +1,95 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stella-ops.org/schemas/attestor/stellaops-vex-attestation.v1.json",
"title": "VEX attestation describing vulnerability status for an artifact.",
"type": "object",
"additionalProperties": false,
"required": [
"schemaVersion",
"subjectDigest",
"generatedAt",
"statements"
],
"properties": {
"schemaVersion": {
"type": "string",
"const": "StellaOps.VEXAttestation@1",
"description": "Schema version identifier."
},
"subjectDigest": {
"type": "string",
"pattern": "^sha256:[A-Fa-f0-9]{64}$",
"description": "Artifact digest covered by the VEX statements."
},
"generatedAt": {
"type": "string",
"format": "date-time",
"description": "Timestamp when the VEX attestation was generated."
},
"statements": {
"type": "array",
"items": {
"$ref": "#/$defs/VexStatement"
},
"minItems": 1,
"description": "Collection of VEX statements."
}
},
"$defs": {
"VexStatus": {
"type": "string",
"description": "VEX statement status values.",
"enum": [
"not_affected",
"affected",
"under_investigation",
"fixed"
]
},
"VexStatement": {
"type": "object",
"additionalProperties": false,
"description": "Single VEX statement covering a vulnerability and status.",
"required": [
"vulnerabilityId",
"status",
"timestamp"
],
"properties": {
"vulnerabilityId": {
"type": "string",
"description": "Vulnerability identifier (e.g., CVE)."
},
"status": {
"$ref": "#/$defs/VexStatus",
"description": "VEX status value."
},
"timestamp": {
"type": "string",
"format": "date-time",
"description": "UTC timestamp for statement issuance."
},
"justification": {
"type": "string",
"description": "Justification for the chosen status."
},
"impactStatement": {
"type": "string",
"description": "Impact description for affected systems."
},
"actionStatement": {
"type": "string",
"description": "Recommended remediation or action."
},
"references": {
"type": "array",
"items": {
"type": "string"
},
"minItems": 1,
"description": "Supporting reference URLs."
}
}
}
}
}
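A sketch of a conforming VEX attestation with a single statement (the justification string follows OpenVEX wording but is free-form here; the digest is the empty-input SHA-256 placeholder):
{
  "schemaVersion": "StellaOps.VEXAttestation@1",
  "subjectDigest": "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "generatedAt": "2025-11-01T12:00:00Z",
  "statements": [
    {
      "vulnerabilityId": "CVE-2021-23337",
      "status": "not_affected",
      "timestamp": "2025-11-01T12:00:00Z",
      "justification": "vulnerable_code_not_in_execute_path"
    }
  ]
}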

View File

@@ -0,0 +1,960 @@
using System.Buffers.Binary;
using System.Collections.Immutable;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Verify;
public sealed class AttestorVerificationEngine : IAttestorVerificationEngine
{
private readonly IDsseCanonicalizer _canonicalizer;
private readonly AttestorOptions _options;
private readonly ILogger<AttestorVerificationEngine> _logger;
public AttestorVerificationEngine(
IDsseCanonicalizer canonicalizer,
IOptions<AttestorOptions> options,
ILogger<AttestorVerificationEngine> logger)
{
_canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<VerificationReport> EvaluateAsync(
AttestorEntry entry,
AttestorSubmissionRequest.SubmissionBundle? bundle,
DateTimeOffset evaluationTime,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(entry);
var signatureIssuer = await EvaluateSignatureAndIssuerAsync(entry, bundle, cancellationToken).ConfigureAwait(false);
var freshness = EvaluateFreshness(entry, evaluationTime);
var transparency = EvaluateTransparency(entry);
var policy = EvaluatePolicy(entry, signatureIssuer.Signatures, signatureIssuer.Issuer, freshness, transparency, bundle is not null);
return new VerificationReport(policy, signatureIssuer.Issuer, freshness, signatureIssuer.Signatures, transparency);
}
private async Task<(SignatureEvaluationResult Signatures, IssuerEvaluationResult Issuer)> EvaluateSignatureAndIssuerAsync(
AttestorEntry entry,
AttestorSubmissionRequest.SubmissionBundle? bundle,
CancellationToken cancellationToken)
{
var signatureIssues = new List<string>();
var issuerIssues = new List<string>();
if (bundle is null)
{
var issuerFromEntry = entry.SignerIdentity;
return (
new SignatureEvaluationResult
{
Status = VerificationSectionStatus.Skipped,
BundleProvided = false,
TotalSignatures = 0,
VerifiedSignatures = 0,
RequiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures),
Issues = Array.Empty<string>()
},
new IssuerEvaluationResult
{
Status = VerificationSectionStatus.Skipped,
Mode = issuerFromEntry.Mode ?? "unknown",
Issuer = issuerFromEntry.Issuer,
SubjectAlternativeName = issuerFromEntry.SubjectAlternativeName,
KeyId = issuerFromEntry.KeyId,
Issues = Array.Empty<string>()
});
}
var canonicalRequest = new AttestorSubmissionRequest
{
Bundle = bundle,
Meta = new AttestorSubmissionRequest.SubmissionMeta
{
Artifact = new AttestorSubmissionRequest.ArtifactInfo
{
Sha256 = entry.Artifact.Sha256,
Kind = entry.Artifact.Kind,
ImageDigest = entry.Artifact.ImageDigest,
SubjectUri = entry.Artifact.SubjectUri
},
BundleSha256 = entry.BundleSha256
}
};
byte[] canonicalBundle;
try
{
canonicalBundle = await _canonicalizer.CanonicalizeAsync(canonicalRequest, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is CryptographicException or FormatException)
{
signatureIssues.Add("bundle_canonicalize_failed");
_logger.LogWarning(ex, "Failed to canonicalize DSSE bundle for {Uuid}", entry.RekorUuid);
var issuerFromEntry = entry.SignerIdentity;
return (
new SignatureEvaluationResult
{
Status = VerificationSectionStatus.Fail,
BundleProvided = true,
TotalSignatures = bundle.Dsse.Signatures.Count,
VerifiedSignatures = 0,
RequiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures),
Issues = signatureIssues.ToArray()
},
new IssuerEvaluationResult
{
Status = VerificationSectionStatus.Warn,
Mode = issuerFromEntry.Mode ?? (bundle.Mode ?? "unknown"),
Issuer = issuerFromEntry.Issuer,
SubjectAlternativeName = issuerFromEntry.SubjectAlternativeName,
KeyId = issuerFromEntry.KeyId,
Issues = new[] { "issuer_verification_skipped" }
});
}
var computedHash = Convert.ToHexString(SHA256.HashData(canonicalBundle)).ToLowerInvariant();
if (!string.Equals(computedHash, entry.BundleSha256, StringComparison.OrdinalIgnoreCase))
{
signatureIssues.Add("bundle_hash_mismatch");
}
var mode = (entry.SignerIdentity.Mode ?? bundle.Mode ?? "unknown").ToLowerInvariant();
var requiredSignatures = Math.Max(1, _options.Verification.MinimumSignatures);
var totalSignatures = bundle.Dsse.Signatures.Count;
var verifiedSignatures = 0;
string? subjectAlternativeName = null;
if (!TryDecodeBase64(bundle.Dsse.PayloadBase64, out var payloadBytes))
{
signatureIssues.Add("bundle_payload_invalid_base64");
return (
new SignatureEvaluationResult
{
Status = VerificationSectionStatus.Fail,
BundleProvided = true,
TotalSignatures = bundle.Dsse.Signatures.Count,
VerifiedSignatures = 0,
RequiredSignatures = requiredSignatures,
Issues = signatureIssues.ToArray()
},
new IssuerEvaluationResult
{
Status = VerificationSectionStatus.Warn,
Mode = mode,
Issuer = entry.SignerIdentity.Issuer,
SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName,
KeyId = entry.SignerIdentity.KeyId,
Issues = issuerIssues.ToArray()
});
}
var preAuth = ComputePreAuthEncoding(bundle.Dsse.PayloadType, payloadBytes);
switch (mode)
{
case "kms":
verifiedSignatures = EvaluateKmsSignature(bundle, preAuth, signatureIssues, issuerIssues);
break;
case "keyless":
var keylessResult = EvaluateKeylessSignature(entry, bundle, preAuth, signatureIssues, issuerIssues);
verifiedSignatures = keylessResult.VerifiedSignatures;
subjectAlternativeName = keylessResult.SubjectAlternativeName;
break;
default:
issuerIssues.Add(string.IsNullOrWhiteSpace(mode) ? "signer_mode_unknown" : $"signer_mode_unsupported:{mode}");
break;
}
var signatureStatus = DetermineSignatureStatus(signatureIssues, verifiedSignatures, requiredSignatures, totalSignatures);
var issuerStatus = DetermineIssuerStatus(issuerIssues, mode, verifiedSignatures > 0);
return (
new SignatureEvaluationResult
{
Status = signatureStatus,
BundleProvided = true,
TotalSignatures = totalSignatures,
VerifiedSignatures = verifiedSignatures,
RequiredSignatures = requiredSignatures,
Issues = signatureIssues.ToArray()
},
new IssuerEvaluationResult
{
Status = issuerStatus,
Mode = mode,
Issuer = entry.SignerIdentity.Issuer,
SubjectAlternativeName = subjectAlternativeName ?? entry.SignerIdentity.SubjectAlternativeName,
KeyId = entry.SignerIdentity.KeyId,
Issues = issuerIssues.ToArray()
});
}
private int EvaluateKmsSignature(
AttestorSubmissionRequest.SubmissionBundle bundle,
byte[] preAuthEncoding,
List<string> signatureIssues,
List<string> issuerIssues)
{
if (_options.Security.SignerIdentity.KmsKeys.Count == 0)
{
issuerIssues.Add("kms_key_missing");
return 0;
}
var signatures = new List<byte[]>();
foreach (var signature in bundle.Dsse.Signatures)
{
if (!TryDecodeBase64(signature.Signature, out var signatureBytes))
{
signatureIssues.Add("signature_invalid_base64");
return 0;
}
signatures.Add(signatureBytes);
}
var verified = 0;
foreach (var secret in _options.Security.SignerIdentity.KmsKeys)
{
if (!TryDecodeSecret(secret, out var secretBytes))
{
continue;
}
using var hmac = new HMACSHA256(secretBytes);
var computed = hmac.ComputeHash(preAuthEncoding);
foreach (var candidate in signatures)
{
if (CryptographicOperations.FixedTimeEquals(computed, candidate))
{
verified++;
}
}
}
if (verified == 0)
{
signatureIssues.Add("signature_invalid");
}
return verified;
}
private (int VerifiedSignatures, string? SubjectAlternativeName) EvaluateKeylessSignature(
AttestorEntry entry,
AttestorSubmissionRequest.SubmissionBundle bundle,
byte[] preAuthEncoding,
List<string> signatureIssues,
List<string> issuerIssues)
{
if (bundle.CertificateChain.Count == 0)
{
issuerIssues.Add("certificate_chain_missing");
return (0, null);
}
var certificates = new List<X509Certificate2>();
try
{
foreach (var pem in bundle.CertificateChain)
{
certificates.Add(X509Certificate2.CreateFromPem(pem));
}
}
catch (Exception ex) when (ex is CryptographicException or ArgumentException)
{
issuerIssues.Add("certificate_chain_invalid");
_logger.LogWarning(ex, "Failed to parse certificate chain for {Uuid}", entry.RekorUuid);
return (0, null);
}
var leafCertificate = certificates[0];
var subjectAltName = GetSubjectAlternativeNames(leafCertificate).FirstOrDefault();
if (_options.Security.SignerIdentity.FulcioRoots.Count > 0)
{
using var chain = new X509Chain
{
ChainPolicy =
{
RevocationMode = X509RevocationMode.NoCheck,
VerificationFlags = X509VerificationFlags.NoFlag,
TrustMode = X509ChainTrustMode.CustomRootTrust
}
};
foreach (var rootPath in _options.Security.SignerIdentity.FulcioRoots)
{
try
{
if (File.Exists(rootPath))
{
var rootCertificate = X509CertificateLoader.LoadCertificateFromFile(rootPath);
chain.ChainPolicy.CustomTrustStore.Add(rootCertificate);
}
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to load Fulcio root {Root}", rootPath);
}
}
if (!chain.Build(leafCertificate))
{
var status = string.Join(";", chain.ChainStatus.Select(s => s.StatusInformation.Trim())).Trim(';');
issuerIssues.Add(string.IsNullOrEmpty(status) ? "certificate_chain_untrusted" : $"certificate_chain_untrusted:{status}");
}
}
if (_options.Security.SignerIdentity.AllowedSans.Count > 0)
{
var sans = GetSubjectAlternativeNames(leafCertificate);
if (!sans.Any(san => _options.Security.SignerIdentity.AllowedSans.Contains(san, StringComparer.OrdinalIgnoreCase)))
{
issuerIssues.Add("certificate_san_untrusted");
}
}
var verified = 0;
foreach (var signature in bundle.Dsse.Signatures)
{
if (!TryDecodeBase64(signature.Signature, out var signatureBytes))
{
signatureIssues.Add("signature_invalid_base64");
return (0, subjectAltName);
}
if (TryVerifyWithCertificate(leafCertificate, preAuthEncoding, signatureBytes))
{
verified++;
}
}
if (verified == 0)
{
signatureIssues.Add("signature_invalid");
}
return (verified, subjectAltName);
}
private FreshnessEvaluationResult EvaluateFreshness(AttestorEntry entry, DateTimeOffset evaluationTime)
{
if (entry.CreatedAt == default)
{
return new FreshnessEvaluationResult
{
Status = VerificationSectionStatus.Warn,
CreatedAt = entry.CreatedAt,
EvaluatedAt = evaluationTime,
Age = TimeSpan.Zero,
MaxAge = null,
Issues = new[] { "freshness_unknown" }
};
}
var age = evaluationTime - entry.CreatedAt;
var maxAgeMinutes = _options.Verification.FreshnessMaxAgeMinutes;
var warnAgeMinutes = _options.Verification.FreshnessWarnAgeMinutes;
if (maxAgeMinutes is null)
{
return new FreshnessEvaluationResult
{
Status = VerificationSectionStatus.Skipped,
CreatedAt = entry.CreatedAt,
EvaluatedAt = evaluationTime,
Age = age,
MaxAge = null,
Issues = Array.Empty<string>()
};
}
var maxAge = TimeSpan.FromMinutes(maxAgeMinutes.Value);
VerificationSectionStatus status;
var issues = new List<string>();
if (age > maxAge)
{
status = VerificationSectionStatus.Fail;
issues.Add("freshness_stale");
}
else if (warnAgeMinutes is not null && age > TimeSpan.FromMinutes(warnAgeMinutes.Value))
{
status = VerificationSectionStatus.Warn;
issues.Add("freshness_warning");
}
else
{
status = VerificationSectionStatus.Pass;
}
return new FreshnessEvaluationResult
{
Status = status,
CreatedAt = entry.CreatedAt,
EvaluatedAt = evaluationTime,
Age = age,
MaxAge = maxAge,
Issues = issues.ToArray()
};
}
private TransparencyEvaluationResult EvaluateTransparency(AttestorEntry entry)
{
var issues = new List<string>();
TransparencyEvaluationResult Finalize(VerificationSectionStatus finalStatus, bool proofPresent, bool checkpointPresent, bool inclusionPresent)
{
var witness = entry.Witness;
var witnessPresent = witness is not null;
var witnessMatches = false;
var witnessAggregator = witness?.Aggregator;
var witnessStatus = witness?.Status ?? "missing";
if (witness is null)
{
issues.Add("witness_missing");
if (_options.Verification.RequireWitnessEndorsement)
{
finalStatus = VerificationSectionStatus.Fail;
}
else if (finalStatus != VerificationSectionStatus.Fail)
{
finalStatus = VerificationSectionStatus.Warn;
}
}
else
{
var normalizedStatus = string.IsNullOrWhiteSpace(witness.Status) ? "unknown" : witness.Status!;
if (!string.Equals(normalizedStatus, "endorsed", StringComparison.OrdinalIgnoreCase))
{
issues.Add("witness_status_" + normalizedStatus.ToLowerInvariant());
if (_options.Verification.RequireWitnessEndorsement)
{
finalStatus = VerificationSectionStatus.Fail;
}
else if (finalStatus != VerificationSectionStatus.Fail)
{
finalStatus = VerificationSectionStatus.Warn;
}
}
if (!string.IsNullOrWhiteSpace(witness.RootHash) && entry.Proof?.Checkpoint?.RootHash is not null)
{
if (string.Equals(witness.RootHash, entry.Proof.Checkpoint.RootHash, StringComparison.OrdinalIgnoreCase))
{
witnessMatches = true;
}
else
{
issues.Add("witness_root_mismatch");
if (_options.Verification.RequireWitnessEndorsement)
{
finalStatus = VerificationSectionStatus.Fail;
}
else if (finalStatus != VerificationSectionStatus.Fail)
{
finalStatus = VerificationSectionStatus.Warn;
}
}
}
}
return BuildTransparencyResult(finalStatus, issues, proofPresent, checkpointPresent, inclusionPresent, witnessPresent, witnessMatches, witnessAggregator, witnessStatus);
}
if (entry.Proof is null)
{
issues.Add("proof_missing");
var finalStatus = _options.Verification.RequireTransparencyInclusion ? VerificationSectionStatus.Fail : VerificationSectionStatus.Warn;
return Finalize(finalStatus, false, false, false);
}
if (!TryDecodeHash(entry.BundleSha256, out var bundleHash))
{
issues.Add("bundle_hash_decode_failed");
return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, entry.Proof.Inclusion is not null);
}
if (entry.Proof.Inclusion is null)
{
issues.Add("proof_inclusion_missing");
var finalStatus = _options.Verification.RequireTransparencyInclusion ? VerificationSectionStatus.Fail : VerificationSectionStatus.Warn;
return Finalize(finalStatus, true, entry.Proof.Checkpoint is not null, false);
}
if (entry.Proof.Inclusion.LeafHash is not null)
{
if (!TryDecodeHash(entry.Proof.Inclusion.LeafHash, out var proofLeaf))
{
issues.Add("proof_leafhash_decode_failed");
return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true);
}
if (!CryptographicOperations.FixedTimeEquals(bundleHash, proofLeaf))
{
issues.Add("proof_leafhash_mismatch");
}
}
var current = bundleHash;
var inclusionNodesPresent = entry.Proof.Inclusion.Path.Count > 0;
if (inclusionNodesPresent)
{
var nodes = new List<ProofPathNode>();
foreach (var element in entry.Proof.Inclusion.Path)
{
if (!ProofPathNode.TryParse(element, out var node))
{
issues.Add("proof_path_decode_failed");
return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true);
}
if (!node.HasOrientation)
{
issues.Add("proof_path_orientation_missing");
return Finalize(VerificationSectionStatus.Fail, true, entry.Proof.Checkpoint is not null, true);
}
nodes.Add(node);
}
foreach (var node in nodes)
{
current = node.Left ? HashInternal(node.Hash, current) : HashInternal(current, node.Hash);
}
}
if (entry.Proof.Checkpoint is null)
{
issues.Add("checkpoint_missing");
var finalStatus = _options.Verification.RequireCheckpoint ? VerificationSectionStatus.Fail : VerificationSectionStatus.Warn;
return Finalize(finalStatus, true, false, inclusionNodesPresent);
}
if (!TryDecodeHash(entry.Proof.Checkpoint.RootHash, out var rootHash))
{
issues.Add("checkpoint_root_decode_failed");
return Finalize(VerificationSectionStatus.Fail, true, true, inclusionNodesPresent);
}
if (!CryptographicOperations.FixedTimeEquals(current, rootHash))
{
issues.Add("proof_root_mismatch");
}
var status = issues.Count == 0 ? VerificationSectionStatus.Pass : VerificationSectionStatus.Fail;
return Finalize(status, true, true, inclusionNodesPresent);
}
private PolicyEvaluationResult EvaluatePolicy(
AttestorEntry entry,
SignatureEvaluationResult signatures,
IssuerEvaluationResult issuer,
FreshnessEvaluationResult freshness,
TransparencyEvaluationResult transparency,
bool bundleProvided)
{
var issues = new List<string>();
var status = VerificationSectionStatus.Pass;
if (!string.Equals(entry.Status, "included", StringComparison.OrdinalIgnoreCase))
{
issues.Add($"log_status_{entry.Status.ToLowerInvariant()}");
status = VerificationSectionStatus.Fail;
}
if (_options.Verification.RequireBundleForSignatureValidation && !bundleProvided)
{
issues.Add("bundle_required");
status = VerificationSectionStatus.Fail;
}
status = CombinePolicyStatus(status, signatures.Status, "signatures", issues);
status = CombinePolicyStatus(status, issuer.Status, "issuer", issues);
status = CombinePolicyStatus(status, freshness.Status, "freshness", issues, warnOnly: true);
status = CombinePolicyStatus(status, transparency.Status, "transparency", issues);
var verdict = status switch
{
VerificationSectionStatus.Fail => "fail",
VerificationSectionStatus.Warn => "warn",
VerificationSectionStatus.Pass => "pass",
_ => "unknown"
};
var attributes = ImmutableDictionary<string, string>.Empty
.Add("status", entry.Status ?? "unknown")
.Add("logBackend", entry.Log.Backend ?? "primary")
.Add("logUrl", entry.Log.Url ?? string.Empty);
if (entry.Index.HasValue)
{
attributes = attributes.Add("index", entry.Index.Value.ToString());
}
if (entry.Proof?.Checkpoint?.Timestamp is not null)
{
attributes = attributes.Add("checkpointTs", entry.Proof.Checkpoint.Timestamp.Value.ToString("O"));
}
return new PolicyEvaluationResult
{
Status = status,
PolicyId = _options.Verification.PolicyId,
PolicyVersion = _options.Verification.PolicyVersion,
Verdict = verdict,
Issues = issues.Distinct(StringComparer.OrdinalIgnoreCase).ToArray(),
Attributes = attributes
};
}
private static VerificationSectionStatus DetermineSignatureStatus(
IReadOnlyCollection<string> issues,
int verified,
int required,
int total)
{
if (total == 0)
{
return VerificationSectionStatus.Fail;
}
if (issues.Count > 0)
{
return issues.Contains("signature_invalid", StringComparer.OrdinalIgnoreCase)
|| issues.Contains("bundle_payload_invalid_base64", StringComparer.OrdinalIgnoreCase)
|| issues.Contains("bundle_hash_mismatch", StringComparer.OrdinalIgnoreCase)
? VerificationSectionStatus.Fail
: VerificationSectionStatus.Warn;
}
return verified >= required ? VerificationSectionStatus.Pass : VerificationSectionStatus.Fail;
}
private static VerificationSectionStatus DetermineIssuerStatus(
IReadOnlyCollection<string> issues,
string mode,
bool signatureVerified)
{
if (issues.Count == 0)
{
return signatureVerified ? VerificationSectionStatus.Pass : VerificationSectionStatus.Warn;
}
if (issues.Any(issue => issue.StartsWith("certificate_", StringComparison.OrdinalIgnoreCase) || issue.StartsWith("kms_", StringComparison.OrdinalIgnoreCase)))
{
return VerificationSectionStatus.Fail;
}
if (issues.Any(issue => issue.StartsWith("signer_mode", StringComparison.OrdinalIgnoreCase)))
{
return VerificationSectionStatus.Fail;
}
return VerificationSectionStatus.Warn;
}
private static VerificationSectionStatus CombinePolicyStatus(
VerificationSectionStatus current,
VerificationSectionStatus next,
string component,
List<string> issues,
bool warnOnly = false)
{
if (next == VerificationSectionStatus.Fail)
{
issues.Add($"policy_blocked:{component}");
return VerificationSectionStatus.Fail;
}
if (next == VerificationSectionStatus.Warn && !warnOnly)
{
issues.Add($"policy_warn:{component}");
return current == VerificationSectionStatus.Fail ? current : VerificationSectionStatus.Warn;
}
if (next == VerificationSectionStatus.Warn && warnOnly)
{
issues.Add($"policy_warn:{component}");
return current;
}
return current;
}
private static TransparencyEvaluationResult BuildTransparencyResult(
VerificationSectionStatus status,
List<string> issues,
bool proofPresent,
bool checkpointPresent,
bool inclusionPresent,
bool witnessPresent,
bool witnessMatches,
string? witnessAggregator,
string witnessStatus)
{
return new TransparencyEvaluationResult
{
Status = status,
ProofPresent = proofPresent,
CheckpointPresent = checkpointPresent,
InclusionPathPresent = inclusionPresent,
WitnessPresent = witnessPresent,
WitnessMatchesRoot = witnessMatches,
WitnessAggregator = witnessAggregator,
WitnessStatus = witnessStatus,
Issues = issues.ToArray()
};
}
private static bool TryVerifyWithCertificate(X509Certificate2 certificate, byte[] preAuthEncoding, byte[] signature)
{
try
{
var ecdsa = certificate.GetECDsaPublicKey();
if (ecdsa is not null)
{
using (ecdsa)
{
if (ecdsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256))
{
return true;
}
}
}
var rsa = certificate.GetRSAPublicKey();
if (rsa is not null)
{
using (rsa)
{
if (rsa.VerifyData(preAuthEncoding, signature, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1))
{
return true;
}
}
}
}
catch (CryptographicException)
{
return false;
}
return false;
}
private static IEnumerable<string> GetSubjectAlternativeNames(X509Certificate2 certificate)
{
foreach (var extension in certificate.Extensions)
{
if (string.Equals(extension.Oid?.Value, "2.5.29.17", StringComparison.Ordinal))
{
var formatted = extension.Format(true);
var lines = formatted.Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries);
foreach (var line in lines)
{
var parts = line.Split('=');
if (parts.Length == 2)
{
yield return parts[1].Trim();
}
}
}
}
}
private static byte[] ComputePreAuthEncoding(string payloadType, byte[] payload)
{
var headerBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
var buffer = new byte[6 + 8 + headerBytes.Length + 8 + payload.Length];
var offset = 0;
Encoding.ASCII.GetBytes("DSSEv1", 0, 6, buffer, offset);
offset += 6;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)headerBytes.Length);
offset += 8;
Buffer.BlockCopy(headerBytes, 0, buffer, offset, headerBytes.Length);
offset += headerBytes.Length;
BinaryPrimitives.WriteUInt64BigEndian(buffer.AsSpan(offset, 8), (ulong)payload.Length);
offset += 8;
Buffer.BlockCopy(payload, 0, buffer, offset, payload.Length);
return buffer;
}
private static byte[] HashInternal(byte[] left, byte[] right)
{
using var sha = SHA256.Create();
var buffer = new byte[1 + left.Length + right.Length];
buffer[0] = 0x01;
Buffer.BlockCopy(left, 0, buffer, 1, left.Length);
Buffer.BlockCopy(right, 0, buffer, 1 + left.Length, right.Length);
return sha.ComputeHash(buffer);
}
private static bool TryDecodeSecret(string value, out byte[] bytes)
{
if (string.IsNullOrWhiteSpace(value))
{
bytes = Array.Empty<byte>();
return false;
}
value = value.Trim();
if (value.StartsWith("base64:", StringComparison.OrdinalIgnoreCase))
{
return TryDecodeBase64(value[7..], out bytes);
}
if (value.StartsWith("hex:", StringComparison.OrdinalIgnoreCase))
{
return TryDecodeHex(value[4..], out bytes);
}
if (TryDecodeBase64(value, out bytes))
{
return true;
}
if (TryDecodeHex(value, out bytes))
{
return true;
}
bytes = Array.Empty<byte>();
return false;
}
private static bool TryDecodeBase64(string value, out byte[] bytes)
{
try
{
bytes = Convert.FromBase64String(value);
return true;
}
catch (FormatException)
{
bytes = Array.Empty<byte>();
return false;
}
}
private static bool TryDecodeHex(string value, out byte[] bytes)
{
try
{
bytes = Convert.FromHexString(value);
return true;
}
catch (FormatException)
{
bytes = Array.Empty<byte>();
return false;
}
}
private static bool TryDecodeHash(string? value, out byte[] bytes)
{
bytes = Array.Empty<byte>();
if (string.IsNullOrWhiteSpace(value))
{
return false;
}
var trimmed = value.Trim();
if (TryDecodeHex(trimmed, out bytes))
{
return true;
}
if (TryDecodeBase64(trimmed, out bytes))
{
return true;
}
bytes = Array.Empty<byte>();
return false;
}
private readonly struct ProofPathNode
{
private ProofPathNode(bool hasOrientation, bool left, byte[] hash)
{
HasOrientation = hasOrientation;
Left = left;
Hash = hash;
}
public bool HasOrientation { get; }
public bool Left { get; }
public byte[] Hash { get; }
public static bool TryParse(string value, out ProofPathNode node)
{
node = default;
if (string.IsNullOrWhiteSpace(value))
{
return false;
}
var trimmed = value.Trim();
var parts = trimmed.Split(':', 2);
bool hasOrientation = false;
bool left = false;
string hashPart = trimmed;
if (parts.Length == 2)
{
var prefix = parts[0].Trim().ToLowerInvariant();
if (prefix is "l" or "left")
{
hasOrientation = true;
left = true;
}
else if (prefix is "r" or "right")
{
hasOrientation = true;
left = false;
}
hashPart = parts[1].Trim();
}
if (!TryDecodeHash(hashPart, out var hash))
{
return false;
}
node = new ProofPathNode(hasOrientation, left, hash);
return true;
}
}
}

View File

@@ -0,0 +1,14 @@
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Verify;
public interface IAttestorVerificationEngine
{
Task<VerificationReport> EvaluateAsync(
AttestorEntry entry,
AttestorSubmissionRequest.SubmissionBundle? bundle,
DateTimeOffset evaluationTime,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Attestor\\StellaOps.Attestor.Core\\StellaOps.Attestor.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -3,11 +3,11 @@
## Sprint 73 Policy Integration
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-VERIFY-73-001 | TODO | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. |
| ATTEST-VERIFY-73-002 | TODO | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. |
| ATTEST-VERIFY-73-001 | DONE | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. |
| ATTEST-VERIFY-73-002 | DONE | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. |
## Sprint 74 Explainability & Observability
| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| ATTEST-VERIFY-74-001 | TODO | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. |
| ATTEST-VERIFY-74-002 | TODO | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. | Documentation merged; examples verified via tests. |
| ATTEST-VERIFY-74-001 | DONE | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. |
| ATTEST-VERIFY-74-002 | DONE (2025-11-01) | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/modules/attestor/workflows.md`. | Documentation merged; examples verified via tests. |

View File

@@ -0,0 +1,94 @@
using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Core.Bulk;
public sealed class BulkVerificationJob
{
public string Id { get; set; } = Guid.NewGuid().ToString("N");
public int Version { get; set; }
public BulkVerificationJobStatus Status { get; set; } = BulkVerificationJobStatus.Queued;
public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow;
public DateTimeOffset? StartedAt { get; set; }
public DateTimeOffset? CompletedAt { get; set; }
public BulkVerificationJobContext Context { get; set; } = new();
public IList<BulkVerificationJobItem> Items { get; set; } = new List<BulkVerificationJobItem>();
public int ProcessedCount { get; set; }
public int SucceededCount { get; set; }
public int FailedCount { get; set; }
public string? FailureReason { get; set; }
public bool AllCompleted => Items.Count > 0 && Items.All(i => i.Status is BulkVerificationItemStatus.Succeeded or BulkVerificationItemStatus.Failed);
}
public sealed class BulkVerificationJobItem
{
public int Index { get; set; }
public BulkVerificationItemRequest Request { get; set; } = new();
public BulkVerificationItemStatus Status { get; set; } = BulkVerificationItemStatus.Pending;
public DateTimeOffset? StartedAt { get; set; }
public DateTimeOffset? CompletedAt { get; set; }
public AttestorVerificationResult? Result { get; set; }
public string? Error { get; set; }
}
public sealed class BulkVerificationItemRequest
{
public string? Uuid { get; set; }
public string? ArtifactSha256 { get; set; }
public string? Subject { get; set; }
public string? EnvelopeId { get; set; }
public string? PolicyVersion { get; set; }
public bool RefreshProof { get; set; }
}
public sealed class BulkVerificationJobContext
{
public string? Tenant { get; set; }
public string? RequestedBy { get; set; }
public string? ClientId { get; set; }
public IList<string> Scopes { get; set; } = new List<string>();
}
public enum BulkVerificationJobStatus
{
Queued,
Running,
Completed,
Failed
}
public enum BulkVerificationItemStatus
{
Pending,
Running,
Succeeded,
Failed
}
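Rendered as JSON, a bulk job's item requests would carry the fields above roughly as follows; the camelCase casing and the bare-array shape are assumptions for illustration, not the definitive wire contract:
[
  { "uuid": "<rekor-entry-uuid>", "refreshProof": true },
  {
    "artifactSha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
    "policyVersion": "1.0.0"
  }
]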

View File

@@ -0,0 +1,18 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.Core.Bulk;
public interface IBulkVerificationJobStore
{
Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default);
Task<BulkVerificationJob?> GetAsync(string jobId, CancellationToken cancellationToken = default);
Task<BulkVerificationJob?> TryAcquireAsync(CancellationToken cancellationToken = default);
Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default);
Task<int> CountQueuedAsync(CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,59 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
namespace StellaOps.Attestor.Core.Observability;
public sealed class AttestorActivitySource : IDisposable
{
public const string Name = "StellaOps.Attestor";
private readonly ActivitySource _source = new(Name);
private bool _disposed;
public Activity? StartVerification(string subject, string issuer, string policy)
{
var tags = new ActivityTagsCollection
{
{ AttestorTelemetryTags.Subject, subject },
{ AttestorTelemetryTags.Issuer, issuer },
{ AttestorTelemetryTags.Policy, policy }
};
return _source.StartActivity("attestor.verify", ActivityKind.Internal, default(ActivityContext), tags);
}
public Activity? StartProofRefresh(string backend, string policy)
{
var tags = new ActivityTagsCollection
{
{ "attestor.log.backend", backend },
{ AttestorTelemetryTags.Policy, policy }
};
return _source.StartActivity("attestor.verify.refresh_proof", ActivityKind.Internal, default(ActivityContext), tags);
}
public Activity? StartWitnessFetch(string aggregator)
{
var tags = new ActivityTagsCollection
{
{ AttestorTelemetryTags.WitnessAggregator, string.IsNullOrWhiteSpace(aggregator) ? "unknown" : aggregator }
};
return _source.StartActivity("attestor.verify.fetch_witness", ActivityKind.Internal, default(ActivityContext), tags);
}
public ActivitySource Source => _source;
public void Dispose()
{
if (_disposed)
{
return;
}
_source.Dispose();
_disposed = true;
}
}

View File

@@ -1,45 +1,75 @@
using System.Diagnostics.Metrics;
namespace StellaOps.Attestor.Core.Observability;
public sealed class AttestorMetrics : IDisposable
{
public const string MeterName = "StellaOps.Attestor";
private readonly Meter _meter;
private bool _disposed;
public AttestorMetrics()
{
_meter = new Meter(MeterName);
SubmitTotal = _meter.CreateCounter<long>("attestor.submit_total", description: "Total submission attempts grouped by result and backend.");
SubmitLatency = _meter.CreateHistogram<double>("attestor.submit_latency_seconds", unit: "s", description: "Submission latency in seconds per backend.");
ProofFetchTotal = _meter.CreateCounter<long>("attestor.proof_fetch_total", description: "Proof fetch attempts grouped by result.");
VerifyTotal = _meter.CreateCounter<long>("attestor.verify_total", description: "Verification attempts grouped by result.");
DedupeHitsTotal = _meter.CreateCounter<long>("attestor.dedupe_hits_total", description: "Number of dedupe hits by outcome.");
ErrorTotal = _meter.CreateCounter<long>("attestor.errors_total", description: "Total errors grouped by type.");
}
public Counter<long> SubmitTotal { get; }
public Histogram<double> SubmitLatency { get; }
public Counter<long> ProofFetchTotal { get; }
public Counter<long> VerifyTotal { get; }
public Counter<long> DedupeHitsTotal { get; }
public Counter<long> ErrorTotal { get; }
public void Dispose()
{
if (_disposed)
{
return;
}
_meter.Dispose();
_disposed = true;
}
}
using System.Diagnostics.Metrics;
namespace StellaOps.Attestor.Core.Observability;
public sealed class AttestorMetrics : IDisposable
{
public const string MeterName = "StellaOps.Attestor";
private readonly Meter _meter;
private bool _disposed;
public AttestorMetrics()
{
_meter = new Meter(MeterName);
SubmitTotal = _meter.CreateCounter<long>("attestor.submit_total", description: "Total submission attempts grouped by result and backend.");
SubmitLatency = _meter.CreateHistogram<double>("attestor.submit_latency_seconds", unit: "s", description: "Submission latency in seconds per backend.");
SignTotal = _meter.CreateCounter<long>("attestor.sign_total", description: "Total signing attempts grouped by result/algorithm/provider.");
SignLatency = _meter.CreateHistogram<double>("attestor.sign_latency_seconds", unit: "s", description: "Signing latency in seconds grouped by algorithm/provider.");
ProofFetchTotal = _meter.CreateCounter<long>("attestor.proof_fetch_total", description: "Proof fetch attempts grouped by result.");
WitnessFetchTotal = _meter.CreateCounter<long>("attestor.witness_fetch_total", description: "Transparency witness fetch attempts grouped by result and aggregator.");
WitnessFetchLatency = _meter.CreateHistogram<double>("attestor.witness_fetch_latency_seconds", unit: "s", description: "Transparency witness fetch latency grouped by aggregator.");
VerifyTotal = _meter.CreateCounter<long>("attestor.verify_total", description: "Verification attempts grouped by subject, issuer, policy, and result.");
VerifyLatency = _meter.CreateHistogram<double>("attestor.verify_latency_seconds", unit: "s", description: "Verification latency in seconds grouped by subject, issuer, policy, and result.");
VerifyCacheLookupTotal = _meter.CreateCounter<long>("attestor.verify_cache_lookup_total", description: "Verification cache lookups.");
VerifyCacheHitTotal = _meter.CreateCounter<long>("attestor.verify_cache_hit_total", description: "Verification cache hits.");
DedupeHitsTotal = _meter.CreateCounter<long>("attestor.dedupe_hits_total", description: "Number of dedupe hits by outcome.");
BulkJobsTotal = _meter.CreateCounter<long>("attestor.bulk_jobs_total", description: "Bulk verification jobs processed grouped by status.");
BulkItemsTotal = _meter.CreateCounter<long>("attestor.bulk_items_total", description: "Bulk verification items processed grouped by result.");
BulkJobDuration = _meter.CreateHistogram<double>("attestor.bulk_job_duration_seconds", unit: "s", description: "Bulk verification job duration in seconds grouped by status.");
ErrorTotal = _meter.CreateCounter<long>("attestor.errors_total", description: "Total errors grouped by type.");
}
public Counter<long> SubmitTotal { get; }
public Histogram<double> SubmitLatency { get; }
public Counter<long> SignTotal { get; }
public Histogram<double> SignLatency { get; }
public Counter<long> ProofFetchTotal { get; }
public Counter<long> WitnessFetchTotal { get; }
public Histogram<double> WitnessFetchLatency { get; }
public Counter<long> VerifyTotal { get; }
public Histogram<double> VerifyLatency { get; }
public Counter<long> VerifyCacheLookupTotal { get; }
public Counter<long> VerifyCacheHitTotal { get; }
public Counter<long> DedupeHitsTotal { get; }
public Counter<long> BulkJobsTotal { get; }
public Counter<long> BulkItemsTotal { get; }
public Histogram<double> BulkJobDuration { get; }
public Counter<long> ErrorTotal { get; }
public void Dispose()
{
if (_disposed)
{
return;
}
_meter.Dispose();
_disposed = true;
}
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Attestor.Core.Observability;
public static class AttestorTelemetryTags
{
public const string Subject = "attestor.subject";
public const string Issuer = "attestor.issuer";
public const string Policy = "attestor.policy";
public const string Result = "result";
public const string WitnessAggregator = "attestor.witness.aggregator";
}

View File

@@ -0,0 +1,74 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Core.Offline;
public static class AttestorBundleVersions
{
public const string V1 = "stellaops.attestor.bundle/1";
public const string Current = V1;
}
public sealed class AttestorBundlePackage
{
public string Version { get; init; } = AttestorBundleVersions.Current;
public DateTimeOffset GeneratedAt { get; init; }
public IReadOnlyList<AttestorBundleItem> Items { get; init; } = Array.Empty<AttestorBundleItem>();
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ContinuationToken { get; init; }
}
public sealed class AttestorBundleItem
{
public AttestorEntry Entry { get; init; } = new();
/// <summary>
/// Canonical DSSE envelope encoded as base64 (UTF-8 JSON).
/// </summary>
public string CanonicalBundle { get; init; } = string.Empty;
/// <summary>
/// Optional Rekor proof payload encoded as base64 (UTF-8 JSON).
/// </summary>
public string? Proof { get; init; }
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
public sealed class AttestorBundleExportRequest
{
public IReadOnlyList<string> Uuids { get; init; } = Array.Empty<string>();
public string? Subject { get; init; }
public string? Type { get; init; }
public string? Issuer { get; init; }
public string? Scope { get; init; }
public DateTimeOffset? CreatedAfter { get; init; }
public DateTimeOffset? CreatedBefore { get; init; }
public int? Limit { get; init; }
public string? ContinuationToken { get; init; }
}
public sealed class AttestorBundleImportResult
{
public int Imported { get; init; }
public int Updated { get; init; }
public int Skipped { get; init; }
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}

View File

@@ -0,0 +1,11 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.Core.Offline;
public interface IAttestorBundleService
{
Task<AttestorBundlePackage> ExportAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken = default);
Task<AttestorBundleImportResult> ImportAsync(AttestorBundlePackage package, CancellationToken cancellationToken = default);
}

View File

@@ -1,148 +1,301 @@
using System.Collections.Generic;
namespace StellaOps.Attestor.Core.Options;
/// <summary>
/// Strongly typed configuration for the Attestor service.
/// </summary>
public sealed class AttestorOptions
{
public string Listen { get; set; } = "https://0.0.0.0:8444";
public SecurityOptions Security { get; set; } = new();
public RekorOptions Rekor { get; set; } = new();
public MongoOptions Mongo { get; set; } = new();
public RedisOptions Redis { get; set; } = new();
public S3Options S3 { get; set; } = new();
public QuotaOptions Quotas { get; set; } = new();
public TelemetryOptions Telemetry { get; set; } = new();
public sealed class SecurityOptions
{
public MtlsOptions Mtls { get; set; } = new();
public AuthorityOptions Authority { get; set; } = new();
public SignerIdentityOptions SignerIdentity { get; set; } = new();
}
public sealed class MtlsOptions
{
public bool RequireClientCertificate { get; set; } = true;
public string? CaBundle { get; set; }
public IList<string> AllowedSubjects { get; set; } = new List<string>();
public IList<string> AllowedThumbprints { get; set; } = new List<string>();
}
public sealed class AuthorityOptions
{
public string? Issuer { get; set; }
public string? JwksUrl { get; set; }
public string? RequireSenderConstraint { get; set; }
public bool RequireHttpsMetadata { get; set; } = true;
public IList<string> Audiences { get; set; } = new List<string>();
public IList<string> RequiredScopes { get; set; } = new List<string>();
}
public sealed class SignerIdentityOptions
{
public IList<string> Mode { get; set; } = new List<string> { "keyless", "kms" };
public IList<string> FulcioRoots { get; set; } = new List<string>();
public IList<string> AllowedSans { get; set; } = new List<string>();
public IList<string> KmsKeys { get; set; } = new List<string>();
}
public sealed class RekorOptions
{
public RekorBackendOptions Primary { get; set; } = new();
public RekorMirrorOptions Mirror { get; set; } = new();
}
public class RekorBackendOptions
{
public string? Url { get; set; }
public int ProofTimeoutMs { get; set; } = 15_000;
public int PollIntervalMs { get; set; } = 250;
public int MaxAttempts { get; set; } = 60;
}
public sealed class RekorMirrorOptions : RekorBackendOptions
{
public bool Enabled { get; set; }
}
public sealed class MongoOptions
{
public string? Uri { get; set; }
public string Database { get; set; } = "attestor";
public string EntriesCollection { get; set; } = "entries";
public string DedupeCollection { get; set; } = "dedupe";
public string AuditCollection { get; set; } = "audit";
}
public sealed class RedisOptions
{
public string? Url { get; set; }
public string? DedupePrefix { get; set; } = "attestor:dedupe:";
}
public sealed class S3Options
{
public bool Enabled { get; set; }
public string? Endpoint { get; set; }
public string? Bucket { get; set; }
public string? Prefix { get; set; }
public string? ObjectLockMode { get; set; }
public bool UseTls { get; set; } = true;
}
public sealed class QuotaOptions
{
public PerCallerQuotaOptions PerCaller { get; set; } = new();
}
public sealed class PerCallerQuotaOptions
{
public int Qps { get; set; } = 50;
public int Burst { get; set; } = 100;
}
public sealed class TelemetryOptions
{
public bool EnableLogging { get; set; } = true;
public bool EnableTracing { get; set; } = false;
}
}
using System.Collections.Generic;
using StellaOps.Cryptography;
namespace StellaOps.Attestor.Core.Options;
/// <summary>
/// Strongly typed configuration for the Attestor service.
/// </summary>
public sealed class AttestorOptions
{
public string Listen { get; set; } = "https://0.0.0.0:8444";
public SecurityOptions Security { get; set; } = new();
public RekorOptions Rekor { get; set; } = new();
public SigningOptions Signing { get; set; } = new();
public MongoOptions Mongo { get; set; } = new();
public RedisOptions Redis { get; set; } = new();
public S3Options S3 { get; set; } = new();
public QuotaOptions Quotas { get; set; } = new();
public BulkVerificationOptions BulkVerification { get; set; } = new();
public CacheOptions Cache { get; set; } = new();
public TelemetryOptions Telemetry { get; set; } = new();
public TransparencyWitnessOptions TransparencyWitness { get; set; } = new();
public VerificationOptions Verification { get; set; } = new();
public sealed class SecurityOptions
{
public MtlsOptions Mtls { get; set; } = new();
public AuthorityOptions Authority { get; set; } = new();
public SignerIdentityOptions SignerIdentity { get; set; } = new();
public SubmissionLimitOptions SubmissionLimits { get; set; } = new();
}
public sealed class MtlsOptions
{
public bool RequireClientCertificate { get; set; } = true;
public string? CaBundle { get; set; }
public IList<string> AllowedSubjects { get; set; } = new List<string>();
public IList<string> AllowedThumbprints { get; set; } = new List<string>();
}
public sealed class AuthorityOptions
{
public string? Issuer { get; set; }
public string? JwksUrl { get; set; }
public string? RequireSenderConstraint { get; set; }
public bool RequireHttpsMetadata { get; set; } = true;
public IList<string> Audiences { get; set; } = new List<string>();
public IList<string> RequiredScopes { get; set; } = new List<string>();
}
public sealed class SignerIdentityOptions
{
public IList<string> Mode { get; set; } = new List<string> { "keyless", "kms" };
public IList<string> FulcioRoots { get; set; } = new List<string>();
public IList<string> AllowedSans { get; set; } = new List<string>();
public IList<string> KmsKeys { get; set; } = new List<string>();
}
public sealed class SubmissionLimitOptions
{
/// <summary>
/// Maximum allowed DSSE payload size, in bytes, after base64 decoding.
/// </summary>
public int MaxPayloadBytes { get; set; } = 2 * 1024 * 1024;
/// <summary>
/// Maximum number of DSSE signatures accepted per submission.
/// </summary>
public int MaxSignatures { get; set; } = 6;
/// <summary>
/// Maximum number of certificates allowed in the leaf-to-root chain.
/// </summary>
public int MaxCertificateChainEntries { get; set; } = 6;
}
public sealed class RekorOptions
{
public RekorBackendOptions Primary { get; set; } = new();
public RekorMirrorOptions Mirror { get; set; } = new();
}
public class RekorBackendOptions
{
public string? Url { get; set; }
public int ProofTimeoutMs { get; set; } = 15_000;
public int PollIntervalMs { get; set; } = 250;
public int MaxAttempts { get; set; } = 60;
}
public sealed class RekorMirrorOptions : RekorBackendOptions
{
public bool Enabled { get; set; }
}
public sealed class MongoOptions
{
public string? Uri { get; set; }
public string Database { get; set; } = "attestor";
public string EntriesCollection { get; set; } = "entries";
public string DedupeCollection { get; set; } = "dedupe";
public string AuditCollection { get; set; } = "audit";
public string BulkJobsCollection { get; set; } = "bulk_jobs";
}
public sealed class RedisOptions
{
public string? Url { get; set; }
public string? DedupePrefix { get; set; } = "attestor:dedupe:";
}
public sealed class S3Options
{
public bool Enabled { get; set; }
public string? Endpoint { get; set; }
public string? Bucket { get; set; }
public string? Prefix { get; set; }
public string? ObjectLockMode { get; set; }
public bool UseTls { get; set; } = true;
}
public sealed class QuotaOptions
{
public PerCallerQuotaOptions PerCaller { get; set; } = new();
public BulkVerificationQuotaOptions Bulk { get; set; } = new();
}
public sealed class PerCallerQuotaOptions
{
public int Qps { get; set; } = 50;
public int Burst { get; set; } = 100;
}
public sealed class BulkVerificationQuotaOptions
{
public int RequestsPerMinute { get; set; } = 6;
public int MaxItemsPerJob { get; set; } = 100;
public int MaxQueuedJobs { get; set; } = 20;
public int MaxConcurrentJobs { get; set; } = 1;
}
public sealed class CacheOptions
{
public VerificationCacheOptions Verification { get; set; } = new();
}
public sealed class VerificationCacheOptions
{
public bool Enabled { get; set; } = true;
public int TtlSeconds { get; set; } = 300;
}
public sealed class TelemetryOptions
{
public bool EnableLogging { get; set; } = true;
public bool EnableTracing { get; set; } = false;
}
public sealed class BulkVerificationOptions
{
public int WorkerPollSeconds { get; set; } = 1;
public int ItemDelayMilliseconds { get; set; } = 10;
public int MaxAttemptsPerItem { get; set; } = 1;
}
public sealed class VerificationOptions
{
public string PolicyId { get; set; } = "default";
public string PolicyVersion { get; set; } = "1.0.0";
public int MinimumSignatures { get; set; } = 1;
public int? FreshnessMaxAgeMinutes { get; set; }
public int? FreshnessWarnAgeMinutes { get; set; }
public bool RequireTransparencyInclusion { get; set; } = true;
public bool RequireCheckpoint { get; set; } = true;
public bool RequireBundleForSignatureValidation { get; set; } = false;
public bool RequireWitnessEndorsement { get; set; } = false;
}
public sealed class TransparencyWitnessOptions
{
public bool Enabled { get; set; }
public string? BaseUrl { get; set; }
public string? ApiKey { get; set; }
public int RequestTimeoutMs { get; set; } = 15_000;
public int CacheTtlSeconds { get; set; } = 900;
public string? AggregatorId { get; set; }
}
public sealed class SigningOptions
{
public IList<string> PreferredProviders { get; set; } = new List<string>();
public IList<SigningKeyOptions> Keys { get; set; } = new List<SigningKeyOptions>();
public SigningKmsOptions? Kms { get; set; }
}
public sealed class SigningKmsOptions
{
public bool Enabled { get; set; } = true;
public string? RootPath { get; set; }
public string? Password { get; set; }
public string Algorithm { get; set; } = "ES256K";
public int? KeyDerivationIterations { get; set; }
}
public sealed class SigningKeyOptions
{
public bool Enabled { get; set; } = true;
public string KeyId { get; set; } = string.Empty;
public string? ProviderKeyId { get; set; }
public string? Provider { get; set; }
public string? Mode { get; set; }
public string? Algorithm { get; set; }
public string? MaterialFormat { get; set; }
public string? Material { get; set; }
public string? MaterialPath { get; set; }
public string? MaterialPassphrase { get; set; }
public string? KmsKey { get; set; }
public string? KmsVersionId { get; set; }
public IList<string> CertificateChain { get; set; } = new List<string>();
}
}
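A minimal sketch of how these option classes might be bound and validated at startup. The "attestor" section name and the validation predicate are illustrative assumptions; the AttestorOptions.BulkVerification property is the one consumed by the bulk worker later in this change.
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Attestor.Core.Options;

public static class AttestorOptionsBindingSketch
{
    // Binds AttestorOptions from a hypothetical "attestor" configuration section and
    // fails fast when the bulk-verification settings are out of range.
    public static IServiceCollection AddAttestorOptionsSketch(this IServiceCollection services, IConfiguration configuration)
    {
        services.AddOptions<AttestorOptions>()
            .Bind(configuration.GetSection("attestor"))
            .Validate(o => o.BulkVerification.WorkerPollSeconds >= 1 && o.BulkVerification.MaxAttemptsPerItem >= 1,
                "bulkVerification settings must be positive")
            .ValidateOnStart();
        return services;
    }
}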

View File

@@ -0,0 +1,50 @@
using System.Collections.Generic;
using StellaOps.Attestor.Core.Submission;
namespace StellaOps.Attestor.Core.Signing;
/// <summary>
/// Input contract for attestation signing requests.
/// </summary>
public sealed class AttestationSignRequest
{
/// <summary>
/// Identifier of the signing key to use.
/// </summary>
public string KeyId { get; set; } = string.Empty;
/// <summary>
/// DSSE payload type (MIME).
/// </summary>
public string PayloadType { get; set; } = string.Empty;
/// <summary>
/// Base64 encoded payload.
/// </summary>
public string PayloadBase64 { get; set; } = string.Empty;
/// <summary>
/// Optional signing mode override (e.g. keyless, kms).
/// </summary>
public string? Mode { get; set; }
/// <summary>
/// Optional certificate chain for keyless signatures.
/// </summary>
public IList<string> CertificateChain { get; set; } = new List<string>();
/// <summary>
/// Artifact metadata that will be embedded in the submission meta.
/// </summary>
public AttestorSubmissionRequest.ArtifactInfo Artifact { get; set; } = new();
/// <summary>
/// Preferred transparency log backend ("primary", "mirror", "both").
/// </summary>
public string LogPreference { get; set; } = "primary";
/// <summary>
/// Whether the resulting bundle should be archived.
/// </summary>
public bool Archive { get; set; } = true;
}

View File

@@ -0,0 +1,24 @@
using System;
using StellaOps.Attestor.Core.Submission;
namespace StellaOps.Attestor.Core.Signing;
/// <summary>
/// Represents the signed DSSE bundle ready for Rekor submission.
/// </summary>
public sealed class AttestationSignResult
{
public AttestorSubmissionRequest.SubmissionBundle Bundle { get; init; } = new();
public AttestorSubmissionRequest.SubmissionMeta Meta { get; init; } = new();
public string KeyId { get; init; } = string.Empty;
public string Algorithm { get; init; } = string.Empty;
public string Mode { get; init; } = string.Empty;
public string Provider { get; init; } = string.Empty;
public DateTimeOffset SignedAt { get; init; } = DateTimeOffset.UtcNow;
}

View File

@@ -0,0 +1,20 @@
using System;
namespace StellaOps.Attestor.Core.Signing;
public sealed class AttestorSigningException : Exception
{
public AttestorSigningException(string code, string message)
: base(message)
{
Code = string.IsNullOrWhiteSpace(code) ? "signing_error" : code;
}
public AttestorSigningException(string code, string message, Exception innerException)
: base(message, innerException)
{
Code = string.IsNullOrWhiteSpace(code) ? "signing_error" : code;
}
public string Code { get; }
}

View File

@@ -0,0 +1,36 @@
using System;
using System.Globalization;
using System.Text;
namespace StellaOps.Attestor.Core.Signing;
/// <summary>
/// Computes the DSSE pre-authentication encoding (PAE) for payload signing.
/// </summary>
public static class DssePreAuthenticationEncoding
{
public static byte[] Compute(string payloadType, ReadOnlySpan<byte> payload)
{
// Per the DSSE specification, PAE frames the signed bytes as
//   "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
// where LEN(...) is the ASCII decimal byte length, not a binary length prefix.
var type = payloadType ?? string.Empty;
var typeLength = Encoding.UTF8.GetByteCount(type).ToString(CultureInfo.InvariantCulture);
var bodyLength = payload.Length.ToString(CultureInfo.InvariantCulture);
var header = Encoding.UTF8.GetBytes($"DSSEv1 {typeLength} {type} {bodyLength} ");
var buffer = new byte[header.Length + payload.Length];
header.CopyTo(buffer, 0);
payload.CopyTo(buffer.AsSpan(header.Length));
return buffer;
}
}
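A worked example of the framing this helper produces; the payload type and body are illustrative.
using System.Text;
using StellaOps.Attestor.Core.Signing;

var payload = Encoding.UTF8.GetBytes("{}");
var pae = DssePreAuthenticationEncoding.Compute("application/vnd.in-toto+json", payload);
// Encoding.UTF8.GetString(pae) == "DSSEv1 28 application/vnd.in-toto+json 2 {}"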

View File

@@ -0,0 +1,13 @@
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Submission;
namespace StellaOps.Attestor.Core.Signing;
public interface IAttestationSigningService
{
Task<AttestationSignResult> SignAsync(
AttestationSignRequest request,
SubmissionContext context,
CancellationToken cancellationToken = default);
}
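Taken together with AttestationSignRequest above, a sketch of a caller driving the signing service. The key identifier is a placeholder, the SubmissionContext is assumed to be produced by the host pipeline, and artifact metadata population is omitted because its shape is defined elsewhere.
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Signing;
using StellaOps.Attestor.Core.Submission;

public static class SigningSketch
{
    public static Task<AttestationSignResult> SignStatementAsync(
        IAttestationSigningService signingService,
        SubmissionContext context,
        byte[] inTotoStatementBytes,
        CancellationToken cancellationToken)
    {
        var request = new AttestationSignRequest
        {
            KeyId = "attestor-default",                 // placeholder key identifier
            PayloadType = "application/vnd.in-toto+json",
            PayloadBase64 = Convert.ToBase64String(inTotoStatementBytes),
            Mode = "keyless",
            LogPreference = "primary",
            Archive = true
        };
        return signingService.SignAsync(request, context, cancellationToken);
    }
}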

View File

@@ -1,9 +1,13 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,105 +1,128 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Attestor.Core.Storage;
/// <summary>
/// Canonical representation of a Rekor entry persisted in Mongo.
/// </summary>
public sealed class AttestorEntry
{
public string RekorUuid { get; init; } = string.Empty;
public ArtifactDescriptor Artifact { get; init; } = new();
public string BundleSha256 { get; init; } = string.Empty;
public long? Index { get; init; }
public ProofDescriptor? Proof { get; init; }
public LogDescriptor Log { get; init; } = new();
public DateTimeOffset CreatedAt { get; init; }
public string Status { get; init; } = "pending";
public SignerIdentityDescriptor SignerIdentity { get; init; } = new();
public LogReplicaDescriptor? Mirror { get; init; }
public sealed class ArtifactDescriptor
{
public string Sha256 { get; init; } = string.Empty;
public string Kind { get; init; } = string.Empty;
public string? ImageDigest { get; init; }
public string? SubjectUri { get; init; }
}
public sealed class ProofDescriptor
{
public CheckpointDescriptor? Checkpoint { get; init; }
public InclusionDescriptor? Inclusion { get; init; }
}
public sealed class CheckpointDescriptor
{
public string? Origin { get; init; }
public long Size { get; init; }
public string? RootHash { get; init; }
public DateTimeOffset? Timestamp { get; init; }
}
public sealed class InclusionDescriptor
{
public string? LeafHash { get; init; }
public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
public sealed class LogDescriptor
{
public string Backend { get; init; } = "primary";
public string Url { get; init; } = string.Empty;
public string? LogId { get; init; }
}
public sealed class SignerIdentityDescriptor
{
public string Mode { get; init; } = string.Empty;
public string? Issuer { get; init; }
public string? SubjectAlternativeName { get; init; }
public string? KeyId { get; init; }
}
public sealed class LogReplicaDescriptor
{
public string Backend { get; init; } = string.Empty;
public string Url { get; init; } = string.Empty;
public string? Uuid { get; init; }
public long? Index { get; init; }
public string Status { get; init; } = "pending";
public ProofDescriptor? Proof { get; init; }
public string? LogId { get; init; }
public string? Error { get; init; }
}
}
using System;
using System.Collections.Generic;
namespace StellaOps.Attestor.Core.Storage;
/// <summary>
/// Canonical representation of a Rekor entry persisted in Mongo.
/// </summary>
public sealed class AttestorEntry
{
public string RekorUuid { get; init; } = string.Empty;
public ArtifactDescriptor Artifact { get; init; } = new();
public string BundleSha256 { get; init; } = string.Empty;
public long? Index { get; init; }
public ProofDescriptor? Proof { get; init; }
public WitnessDescriptor? Witness { get; init; }
public LogDescriptor Log { get; init; } = new();
public DateTimeOffset CreatedAt { get; init; }
public string Status { get; init; } = "pending";
public SignerIdentityDescriptor SignerIdentity { get; init; } = new();
public LogReplicaDescriptor? Mirror { get; init; }
public sealed class ArtifactDescriptor
{
public string Sha256 { get; init; } = string.Empty;
public string Kind { get; init; } = string.Empty;
public string? ImageDigest { get; init; }
public string? SubjectUri { get; init; }
}
public sealed class ProofDescriptor
{
public CheckpointDescriptor? Checkpoint { get; init; }
public InclusionDescriptor? Inclusion { get; init; }
}
public sealed class WitnessDescriptor
{
public string Aggregator { get; init; } = string.Empty;
public string Status { get; init; } = "unknown";
public string? RootHash { get; init; }
public DateTimeOffset RetrievedAt { get; init; }
public string? Statement { get; init; }
public string? Signature { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
}
public sealed class CheckpointDescriptor
{
public string? Origin { get; init; }
public long Size { get; init; }
public string? RootHash { get; init; }
public DateTimeOffset? Timestamp { get; init; }
}
public sealed class InclusionDescriptor
{
public string? LeafHash { get; init; }
public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
public sealed class LogDescriptor
{
public string Backend { get; init; } = "primary";
public string Url { get; init; } = string.Empty;
public string? LogId { get; init; }
}
public sealed class SignerIdentityDescriptor
{
public string Mode { get; init; } = string.Empty;
public string? Issuer { get; init; }
public string? SubjectAlternativeName { get; init; }
public string? KeyId { get; init; }
}
public sealed class LogReplicaDescriptor
{
public string Backend { get; init; } = string.Empty;
public string Url { get; init; } = string.Empty;
public string? Uuid { get; init; }
public long? Index { get; init; }
public string Status { get; init; } = "pending";
public ProofDescriptor? Proof { get; init; }
public string? LogId { get; init; }
public string? Error { get; init; }
public WitnessDescriptor? Witness { get; init; }
}
}

View File

@@ -0,0 +1,83 @@
using System;
using System.Globalization;
using System.Text;
namespace StellaOps.Attestor.Core.Storage;
/// <summary>
/// Encodes and decodes pagination state for attestor entry listings.
/// </summary>
public static class AttestorEntryContinuationToken
{
private const char Separator = '|';
public readonly record struct Cursor(DateTimeOffset CreatedAt, string RekorUuid);
public static string Encode(DateTimeOffset createdAt, string rekorUuid)
{
ArgumentException.ThrowIfNullOrEmpty(rekorUuid);
var ticksText = createdAt.UtcTicks.ToString(CultureInfo.InvariantCulture);
var payload = string.Concat(ticksText, Separator, rekorUuid);
return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload));
}
public static Cursor Parse(string token)
{
if (!TryParse(token, out var cursor))
{
throw new FormatException("Invalid attestor continuation token.");
}
return cursor;
}
public static bool TryParse(string? token, out Cursor cursor)
{
cursor = default;
if (string.IsNullOrWhiteSpace(token))
{
return false;
}
byte[] data;
try
{
data = Convert.FromBase64String(token);
}
catch (FormatException)
{
return false;
}
var decoded = Encoding.UTF8.GetString(data);
var separatorIndex = decoded.IndexOf(Separator, StringComparison.Ordinal);
if (separatorIndex <= 0 || separatorIndex == decoded.Length - 1)
{
return false;
}
var ticksSpan = decoded.AsSpan(0, separatorIndex);
if (!long.TryParse(ticksSpan, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks))
{
return false;
}
var uuid = decoded[(separatorIndex + 1)..];
if (string.IsNullOrEmpty(uuid))
{
return false;
}
try
{
var createdAt = new DateTimeOffset(ticks, TimeSpan.Zero);
cursor = new Cursor(createdAt, uuid);
return true;
}
catch (ArgumentOutOfRangeException)
{
return false;
}
}
}
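A round-trip sketch for the continuation token: encode the cursor of the last entry on a page, then parse it back when the next page is requested. The UUID value is a placeholder.
using System;
using StellaOps.Attestor.Core.Storage;

var token = AttestorEntryContinuationToken.Encode(DateTimeOffset.UtcNow, "rekor-uuid-123");
if (AttestorEntryContinuationToken.TryParse(token, out var cursor))
{
    // cursor.CreatedAt and cursor.RekorUuid identify where the previous page ended.
    Console.WriteLine($"{cursor.CreatedAt:O} / {cursor.RekorUuid}");
}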

View File

@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Attestor.Core.Storage;
/// <summary>
/// Query parameters for listing attestor entries.
/// </summary>
public sealed class AttestorEntryQuery
{
public string? Subject { get; init; }
public string? Type { get; init; }
public string? Issuer { get; init; }
public string? Scope { get; init; }
public DateTimeOffset? CreatedAfter { get; init; }
public DateTimeOffset? CreatedBefore { get; init; }
public int PageSize { get; init; } = 50;
public string? ContinuationToken { get; init; }
}
/// <summary>
/// Represents a single page of attestor entries returned by a query.
/// </summary>
public sealed class AttestorEntryQueryResult
{
public IReadOnlyList<AttestorEntry> Items { get; init; } = Array.Empty<AttestorEntry>();
public string? ContinuationToken { get; init; }
}

View File

@@ -3,7 +3,9 @@ using System.Threading.Tasks;
namespace StellaOps.Attestor.Core.Storage;
public interface IAttestorArchiveStore
{
Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default);
}
public interface IAttestorArchiveStore
{
Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default);
Task<AttestorArchiveBundle?> GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default);
}

View File

@@ -8,9 +8,11 @@ public interface IAttestorEntryRepository
{
Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default);
Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default);
Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default);
}
Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default);
Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default);
Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default);
Task<AttestorEntryQueryResult> QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default);
}
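A paging sketch against QueryAsync that drains a filtered listing via the continuation token; the "sbom" type filter and page size are illustrative.
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Storage;

public static class EntryPagingSketch
{
    public static async Task<IReadOnlyList<AttestorEntry>> ListSbomEntriesAsync(
        IAttestorEntryRepository repository,
        CancellationToken cancellationToken)
    {
        var entries = new List<AttestorEntry>();
        string? continuation = null;
        do
        {
            var page = await repository.QueryAsync(new AttestorEntryQuery
            {
                Type = "sbom",            // illustrative filter value
                PageSize = 50,
                ContinuationToken = continuation
            }, cancellationToken);
            entries.AddRange(page.Items);
            continuation = page.ContinuationToken;
        }
        while (!string.IsNullOrEmpty(continuation));
        return entries;
    }
}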

View File

@@ -1,83 +1,116 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.Core.Submission;
/// <summary>
/// Result returned to callers after processing a submission.
/// </summary>
public sealed class AttestorSubmissionResult
{
[JsonPropertyName("uuid")]
public string? Uuid { get; set; }
[JsonPropertyName("index")]
public long? Index { get; set; }
[JsonPropertyName("proof")]
public RekorProof? Proof { get; set; }
[JsonPropertyName("logURL")]
public string? LogUrl { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("mirror")]
public MirrorLog? Mirror { get; set; }
public sealed class RekorProof
{
[JsonPropertyName("checkpoint")]
public Checkpoint? Checkpoint { get; set; }
[JsonPropertyName("inclusion")]
public InclusionProof? Inclusion { get; set; }
}
public sealed class Checkpoint
{
[JsonPropertyName("origin")]
public string? Origin { get; set; }
[JsonPropertyName("size")]
public long Size { get; set; }
[JsonPropertyName("rootHash")]
public string? RootHash { get; set; }
[JsonPropertyName("timestamp")]
public string? Timestamp { get; set; }
}
public sealed class InclusionProof
{
[JsonPropertyName("leafHash")]
public string? LeafHash { get; set; }
[JsonPropertyName("path")]
public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
public sealed class MirrorLog
{
[JsonPropertyName("uuid")]
public string? Uuid { get; set; }
[JsonPropertyName("index")]
public long? Index { get; set; }
[JsonPropertyName("logURL")]
public string? LogUrl { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("proof")]
public RekorProof? Proof { get; set; }
[JsonPropertyName("error")]
public string? Error { get; set; }
}
}
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.Core.Submission;
/// <summary>
/// Result returned to callers after processing a submission.
/// </summary>
public sealed class AttestorSubmissionResult
{
[JsonPropertyName("uuid")]
public string? Uuid { get; set; }
[JsonPropertyName("index")]
public long? Index { get; set; }
[JsonPropertyName("proof")]
public RekorProof? Proof { get; set; }
[JsonPropertyName("logURL")]
public string? LogUrl { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("mirror")]
public MirrorLog? Mirror { get; set; }
[JsonPropertyName("witness")]
public WitnessStatement? Witness { get; set; }
public sealed class RekorProof
{
[JsonPropertyName("checkpoint")]
public Checkpoint? Checkpoint { get; set; }
[JsonPropertyName("inclusion")]
public InclusionProof? Inclusion { get; set; }
}
public sealed class Checkpoint
{
[JsonPropertyName("origin")]
public string? Origin { get; set; }
[JsonPropertyName("size")]
public long Size { get; set; }
[JsonPropertyName("rootHash")]
public string? RootHash { get; set; }
[JsonPropertyName("timestamp")]
public string? Timestamp { get; set; }
}
public sealed class InclusionProof
{
[JsonPropertyName("leafHash")]
public string? LeafHash { get; set; }
[JsonPropertyName("path")]
public IReadOnlyList<string> Path { get; init; } = Array.Empty<string>();
}
public sealed class MirrorLog
{
[JsonPropertyName("uuid")]
public string? Uuid { get; set; }
[JsonPropertyName("index")]
public long? Index { get; set; }
[JsonPropertyName("logURL")]
public string? LogUrl { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = "pending";
[JsonPropertyName("proof")]
public RekorProof? Proof { get; set; }
[JsonPropertyName("error")]
public string? Error { get; set; }
[JsonPropertyName("witness")]
public WitnessStatement? Witness { get; set; }
}
public sealed class WitnessStatement
{
[JsonPropertyName("aggregator")]
public string? Aggregator { get; set; }
[JsonPropertyName("status")]
public string Status { get; set; } = "unknown";
[JsonPropertyName("rootHash")]
public string? RootHash { get; set; }
[JsonPropertyName("retrievedAt")]
public string? RetrievedAt { get; set; }
[JsonPropertyName("statement")]
public string? Statement { get; set; }
[JsonPropertyName("signature")]
public string? Signature { get; set; }
[JsonPropertyName("keyId")]
public string? KeyId { get; set; }
[JsonPropertyName("error")]
public string? Error { get; set; }
}
}

View File

@@ -12,19 +12,24 @@ public sealed class AttestorSubmissionValidator
private static readonly string[] AllowedKinds = ["sbom", "report", "vex-export"];
private readonly IDsseCanonicalizer _canonicalizer;
private readonly HashSet<string> _allowedModes;
public AttestorSubmissionValidator(IDsseCanonicalizer canonicalizer, IEnumerable<string>? allowedModes = null)
{
_canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
_allowedModes = allowedModes is null
? new HashSet<string>(StringComparer.OrdinalIgnoreCase)
: new HashSet<string>(allowedModes, StringComparer.OrdinalIgnoreCase);
}
public async Task<AttestorSubmissionValidationResult> ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
private readonly HashSet<string> _allowedModes;
private readonly AttestorSubmissionConstraints _constraints;
public AttestorSubmissionValidator(
IDsseCanonicalizer canonicalizer,
IEnumerable<string>? allowedModes = null,
AttestorSubmissionConstraints? constraints = null)
{
_canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
_allowedModes = allowedModes is null
? new HashSet<string>(StringComparer.OrdinalIgnoreCase)
: new HashSet<string>(allowedModes, StringComparer.OrdinalIgnoreCase);
_constraints = constraints ?? AttestorSubmissionConstraints.Default;
}
public async Task<AttestorSubmissionValidationResult> ValidateAsync(AttestorSubmissionRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.Bundle is null)
{
@@ -42,20 +47,25 @@ public sealed class AttestorSubmissionValidator
}
if (string.IsNullOrWhiteSpace(request.Bundle.Dsse.PayloadBase64))
{
throw new AttestorValidationException("payload_missing", "DSSE payload must be provided.");
}
if (request.Bundle.Dsse.Signatures.Count == 0)
{
throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required.");
}
if (_allowedModes.Count > 0 && !string.IsNullOrWhiteSpace(request.Bundle.Mode) && !_allowedModes.Contains(request.Bundle.Mode))
{
throw new AttestorValidationException("mode_not_allowed", $"Submission mode '{request.Bundle.Mode}' is not permitted.");
}
{
throw new AttestorValidationException("payload_missing", "DSSE payload must be provided.");
}
if (request.Bundle.Dsse.Signatures.Count == 0)
{
throw new AttestorValidationException("signature_missing", "At least one DSSE signature is required.");
}
if (request.Bundle.Dsse.Signatures.Count > _constraints.MaxSignatures)
{
throw new AttestorValidationException("signature_limit_exceeded", $"A maximum of {_constraints.MaxSignatures} DSSE signatures is permitted per submission.");
}
if (_allowedModes.Count > 0 && !string.IsNullOrWhiteSpace(request.Bundle.Mode) && !_allowedModes.Contains(request.Bundle.Mode))
{
throw new AttestorValidationException("mode_not_allowed", $"Submission mode '{request.Bundle.Mode}' is not permitted.");
}
if (request.Meta is null)
{
throw new AttestorValidationException("meta_missing", "Submission metadata is required.");
@@ -86,21 +96,31 @@ public sealed class AttestorSubmissionValidator
throw new AttestorValidationException("bundle_sha_invalid", "bundleSha256 must be a 64-character hex string.");
}
if (Array.IndexOf(AllowedKinds, request.Meta.Artifact.Kind) < 0)
{
throw new AttestorValidationException("artifact_kind_invalid", $"Artifact kind '{request.Meta.Artifact.Kind}' is not supported.");
}
if (!Base64UrlDecode(request.Bundle.Dsse.PayloadBase64, out _))
{
throw new AttestorValidationException("payload_invalid_base64", "DSSE payload must be valid base64.");
}
var canonical = await _canonicalizer.CanonicalizeAsync(request, cancellationToken).ConfigureAwait(false);
Span<byte> hash = stackalloc byte[32];
if (!SHA256.TryHashData(canonical, hash, out _))
{
throw new AttestorValidationException("bundle_sha_failure", "Failed to compute canonical bundle hash.");
if (Array.IndexOf(AllowedKinds, request.Meta.Artifact.Kind) < 0)
{
throw new AttestorValidationException("artifact_kind_invalid", $"Artifact kind '{request.Meta.Artifact.Kind}' is not supported.");
}
if (request.Bundle.CertificateChain.Count > _constraints.MaxCertificateChainEntries)
{
throw new AttestorValidationException("certificate_chain_too_long", $"Certificate chain length exceeds {_constraints.MaxCertificateChainEntries} entries.");
}
if (!Base64UrlDecode(request.Bundle.Dsse.PayloadBase64, out var payloadBytes))
{
throw new AttestorValidationException("payload_invalid_base64", "DSSE payload must be valid base64.");
}
if (payloadBytes.Length > _constraints.MaxPayloadBytes)
{
throw new AttestorValidationException("payload_too_large", $"DSSE payload exceeds {_constraints.MaxPayloadBytes} bytes limit.");
}
var canonical = await _canonicalizer.CanonicalizeAsync(request, cancellationToken).ConfigureAwait(false);
Span<byte> hash = stackalloc byte[32];
if (!SHA256.TryHashData(canonical, hash, out _))
{
throw new AttestorValidationException("bundle_sha_failure", "Failed to compute canonical bundle hash.");
}
var hashHex = Convert.ToHexString(hash).ToLowerInvariant();
@@ -172,5 +192,41 @@ public sealed class AttestorSubmissionValidator
}
return value;
}
}
}
}
public sealed class AttestorSubmissionConstraints
{
public static AttestorSubmissionConstraints Default { get; } = new();
public AttestorSubmissionConstraints(
int maxPayloadBytes = 2 * 1024 * 1024,
int maxSignatures = 6,
int maxCertificateChainEntries = 6)
{
if (maxPayloadBytes <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxPayloadBytes), "Max payload bytes must be positive.");
}
if (maxSignatures <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxSignatures), "Max signatures must be positive.");
}
if (maxCertificateChainEntries <= 0)
{
throw new ArgumentOutOfRangeException(nameof(maxCertificateChainEntries), "Max certificate chain entries must be positive.");
}
MaxPayloadBytes = maxPayloadBytes;
MaxSignatures = maxSignatures;
MaxCertificateChainEntries = maxCertificateChainEntries;
}
public int MaxPayloadBytes { get; }
public int MaxSignatures { get; }
public int MaxCertificateChainEntries { get; }
}
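A sketch of constructing the validator with explicit constraints mirroring the SubmissionLimitOptions defaults above. The allowed-mode list is illustrative and the namespaces are assumed from the surrounding files.
using StellaOps.Attestor.Core.Submission;

public static class ValidatorFactorySketch
{
    public static AttestorSubmissionValidator Create(IDsseCanonicalizer canonicalizer)
    {
        var constraints = new AttestorSubmissionConstraints(
            maxPayloadBytes: 2 * 1024 * 1024,
            maxSignatures: 6,
            maxCertificateChainEntries: 6);

        return new AttestorSubmissionValidator(
            canonicalizer,
            allowedModes: new[] { "keyless", "kms" },   // illustrative allow-list
            constraints: constraints);
    }
}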

View File

@@ -0,0 +1,9 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.Core.Transparency;
public interface ITransparencyWitnessClient
{
Task<TransparencyWitnessObservation?> GetObservationAsync(TransparencyWitnessRequest request, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,22 @@
using System;
namespace StellaOps.Attestor.Core.Transparency;
public sealed class TransparencyWitnessObservation
{
public string Aggregator { get; init; } = string.Empty;
public string Status { get; init; } = "unknown";
public string? RootHash { get; init; }
public DateTimeOffset RetrievedAt { get; init; } = DateTimeOffset.UtcNow;
public string? Statement { get; init; }
public string? Signature { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
}

View File

@@ -0,0 +1,9 @@
using System;
namespace StellaOps.Attestor.Core.Transparency;
public sealed record TransparencyWitnessRequest(
string Uuid,
string Backend,
Uri BackendUrl,
string? CheckpointRootHash);
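A sketch of a witness check built on these contracts: request the aggregator's observation for an entry's log and compare the reported root hash against the locally recorded checkpoint.
using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Transparency;

public static class WitnessCheckSketch
{
    public static async Task<bool> WitnessAgreesAsync(
        ITransparencyWitnessClient witnessClient,
        AttestorEntry entry,
        CancellationToken cancellationToken)
    {
        var request = new TransparencyWitnessRequest(
            entry.RekorUuid,
            entry.Log.Backend,
            new Uri(entry.Log.Url),
            entry.Proof?.Checkpoint?.RootHash);

        var observation = await witnessClient.GetObservationAsync(request, cancellationToken);

        // Agreement here simply means the witness saw the same tree root as the stored checkpoint.
        return observation is not null
            && string.Equals(observation.RootHash, entry.Proof?.Checkpoint?.RootHash, StringComparison.OrdinalIgnoreCase);
    }
}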

View File

@@ -5,11 +5,23 @@ namespace StellaOps.Attestor.Core.Verification;
/// </summary>
public sealed class AttestorVerificationRequest
{
public string? Uuid { get; set; }
public Submission.AttestorSubmissionRequest.SubmissionBundle? Bundle { get; set; }
public string? ArtifactSha256 { get; set; }
public bool RefreshProof { get; set; }
}
public string? Uuid { get; set; }
public Submission.AttestorSubmissionRequest.SubmissionBundle? Bundle { get; set; }
public string? ArtifactSha256 { get; set; }
public string? Subject { get; set; }
public string? EnvelopeId { get; set; }
public string? PolicyVersion { get; set; }
public bool RefreshProof { get; set; }
/// <summary>
/// When true, verification does not attempt to contact external transparency logs and
/// surfaces issues for missing proofs instead.
/// </summary>
public bool Offline { get; set; }
}
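A sketch of an offline verification call using the new fields; IAttestorVerificationService.VerifyAsync is the same entry point the bulk worker uses below.
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Verification;

public static class OfflineVerifySketch
{
    public static Task<AttestorVerificationResult> VerifyOfflineAsync(
        IAttestorVerificationService verificationService,
        string rekorUuid,
        CancellationToken cancellationToken)
    {
        var request = new AttestorVerificationRequest
        {
            Uuid = rekorUuid,
            RefreshProof = false,   // do not reach out to the transparency log
            Offline = true
        };
        return verificationService.VerifyAsync(request, cancellationToken);
    }
}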

View File

@@ -11,11 +11,13 @@ public sealed class AttestorVerificationResult
public long? Index { get; init; }
public string? LogUrl { get; init; }
public DateTimeOffset CheckedAt { get; init; } = DateTimeOffset.UtcNow;
public string Status { get; init; } = "unknown";
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}
public string? LogUrl { get; init; }
public DateTimeOffset CheckedAt { get; init; } = DateTimeOffset.UtcNow;
public string Status { get; init; } = "unknown";
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
public VerificationReport? Report { get; init; }
}

View File

@@ -0,0 +1,13 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.Core.Verification;
public interface IAttestorVerificationCache
{
Task<AttestorVerificationResult?> GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default);
Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default);
Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default);
}
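A minimal in-process sketch of this cache contract, keyed by "subject|envelopeId|policyVersion". A production implementation would likely honour VerificationCacheOptions.TtlSeconds and sit on Redis or a memory cache, which this sketch deliberately omits.
using System;
using System.Collections.Concurrent;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Core.Verification;

public sealed class InMemoryVerificationCacheSketch : IAttestorVerificationCache
{
    private readonly ConcurrentDictionary<string, AttestorVerificationResult> _entries = new(StringComparer.Ordinal);

    private static string Key(string subject, string envelopeId, string policyVersion)
        => string.Join('|', subject, envelopeId, policyVersion);

    public Task<AttestorVerificationResult?> GetAsync(string subject, string envelopeId, string policyVersion, CancellationToken cancellationToken = default)
        => Task.FromResult<AttestorVerificationResult?>(
            _entries.TryGetValue(Key(subject, envelopeId, policyVersion), out var result) ? result : null);

    public Task SetAsync(string subject, string envelopeId, string policyVersion, AttestorVerificationResult result, CancellationToken cancellationToken = default)
    {
        _entries[Key(subject, envelopeId, policyVersion)] = result;
        return Task.CompletedTask;
    }

    public Task InvalidateSubjectAsync(string subject, CancellationToken cancellationToken = default)
    {
        // Drop every cached result whose key starts with the subject prefix.
        var prefix = subject + "|";
        foreach (var key in _entries.Keys)
        {
            if (key.StartsWith(prefix, StringComparison.Ordinal))
            {
                _entries.TryRemove(key, out _);
            }
        }
        return Task.CompletedTask;
    }
}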

View File

@@ -0,0 +1,185 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
namespace StellaOps.Attestor.Core.Verification;
public sealed class PolicyEvaluationResult
{
public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped;
public string PolicyId { get; init; } = "default";
public string PolicyVersion { get; init; } = "0.0.0";
public string Verdict { get; init; } = "unknown";
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
public IReadOnlyDictionary<string, string> Attributes { get; init; } =
ImmutableDictionary<string, string>.Empty;
}
public sealed class IssuerEvaluationResult
{
public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped;
public string Mode { get; init; } = "unknown";
public string? Issuer { get; init; }
public string? SubjectAlternativeName { get; init; }
public string? KeyId { get; init; }
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}
public sealed class FreshnessEvaluationResult
{
public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped;
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset EvaluatedAt { get; init; }
public TimeSpan Age { get; init; }
public TimeSpan? MaxAge { get; init; }
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}
public sealed class SignatureEvaluationResult
{
public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped;
public bool BundleProvided { get; init; }
public int TotalSignatures { get; init; }
public int VerifiedSignatures { get; init; }
public int RequiredSignatures { get; init; }
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}
public sealed class TransparencyEvaluationResult
{
public VerificationSectionStatus Status { get; init; } = VerificationSectionStatus.Skipped;
public bool ProofPresent { get; init; }
public bool CheckpointPresent { get; init; }
public bool InclusionPathPresent { get; init; }
public bool WitnessPresent { get; init; }
public bool WitnessMatchesRoot { get; init; }
public string? WitnessAggregator { get; init; }
public string WitnessStatus { get; init; } = "missing";
public IReadOnlyList<string> Issues { get; init; } = Array.Empty<string>();
}
public sealed class VerificationReport
{
public VerificationSectionStatus OverallStatus { get; }
public PolicyEvaluationResult Policy { get; }
public IssuerEvaluationResult Issuer { get; }
public FreshnessEvaluationResult Freshness { get; }
public SignatureEvaluationResult Signatures { get; }
public TransparencyEvaluationResult Transparency { get; }
public IReadOnlyList<string> Issues { get; }
public VerificationReport(
PolicyEvaluationResult policy,
IssuerEvaluationResult issuer,
FreshnessEvaluationResult freshness,
SignatureEvaluationResult signatures,
TransparencyEvaluationResult transparency)
{
Policy = policy ?? throw new ArgumentNullException(nameof(policy));
Issuer = issuer ?? throw new ArgumentNullException(nameof(issuer));
Freshness = freshness ?? throw new ArgumentNullException(nameof(freshness));
Signatures = signatures ?? throw new ArgumentNullException(nameof(signatures));
Transparency = transparency ?? throw new ArgumentNullException(nameof(transparency));
OverallStatus = DetermineOverallStatus(policy, issuer, freshness, signatures, transparency);
Issues = AggregateIssues(policy, issuer, freshness, signatures, transparency);
}
public bool Succeeded => OverallStatus == VerificationSectionStatus.Pass || OverallStatus == VerificationSectionStatus.Warn;
private static VerificationSectionStatus DetermineOverallStatus(params object[] sections)
{
var statuses = sections
.OfType<object>()
.Select(section => section switch
{
PolicyEvaluationResult p => p.Status,
IssuerEvaluationResult i => i.Status,
FreshnessEvaluationResult f => f.Status,
SignatureEvaluationResult s => s.Status,
TransparencyEvaluationResult t => t.Status,
_ => VerificationSectionStatus.Skipped
})
.ToArray();
if (statuses.Any(status => status == VerificationSectionStatus.Fail))
{
return VerificationSectionStatus.Fail;
}
if (statuses.Any(status => status == VerificationSectionStatus.Warn))
{
return VerificationSectionStatus.Warn;
}
if (statuses.All(status => status == VerificationSectionStatus.Skipped))
{
return VerificationSectionStatus.Skipped;
}
return VerificationSectionStatus.Pass;
}
private static IReadOnlyList<string> AggregateIssues(params object[] sections)
{
var set = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
foreach (var section in sections)
{
var issues = section switch
{
PolicyEvaluationResult p => p.Issues,
IssuerEvaluationResult i => i.Issues,
FreshnessEvaluationResult f => f.Issues,
SignatureEvaluationResult s => s.Issues,
TransparencyEvaluationResult t => t.Issues,
_ => Array.Empty<string>()
};
foreach (var issue in issues)
{
if (!string.IsNullOrWhiteSpace(issue))
{
set.Add(issue);
}
}
}
return set.ToArray();
}
}
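A small aggregation example with illustrative issue codes: a single failing section drives the report to Fail, and per-section issues are de-duplicated into Report.Issues.
using StellaOps.Attestor.Core.Verification;

var report = new VerificationReport(
    new PolicyEvaluationResult { Status = VerificationSectionStatus.Pass },
    new IssuerEvaluationResult { Status = VerificationSectionStatus.Pass },
    new FreshnessEvaluationResult { Status = VerificationSectionStatus.Warn, Issues = new[] { "freshness_warn_age_exceeded" } },
    new SignatureEvaluationResult { Status = VerificationSectionStatus.Pass },
    new TransparencyEvaluationResult { Status = VerificationSectionStatus.Fail, Issues = new[] { "checkpoint_missing" } });

// report.OverallStatus == VerificationSectionStatus.Fail
// report.Succeeded == false
// report.Issues contains "freshness_warn_age_exceeded" and "checkpoint_missing"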

View File

@@ -0,0 +1,12 @@
namespace StellaOps.Attestor.Core.Verification;
/// <summary>
/// Represents the evaluation status of an individual verification section.
/// </summary>
public enum VerificationSectionStatus
{
Pass,
Warn,
Fail,
Skipped
}

View File

@@ -0,0 +1,240 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Bulk;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Infrastructure.Bulk;
internal sealed class BulkVerificationWorker : BackgroundService
{
private readonly IBulkVerificationJobStore _jobStore;
private readonly IAttestorVerificationService _verificationService;
private readonly AttestorMetrics _metrics;
private readonly AttestorOptions _options;
private readonly ILogger<BulkVerificationWorker> _logger;
private readonly TimeProvider _timeProvider;
public BulkVerificationWorker(
IBulkVerificationJobStore jobStore,
IAttestorVerificationService verificationService,
AttestorMetrics metrics,
IOptions<AttestorOptions> options,
TimeProvider timeProvider,
ILogger<BulkVerificationWorker> logger)
{
_jobStore = jobStore ?? throw new ArgumentNullException(nameof(jobStore));
_verificationService = verificationService ?? throw new ArgumentNullException(nameof(verificationService));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
protected override async Task ExecuteAsync(CancellationToken stoppingToken)
{
var pollDelay = TimeSpan.FromSeconds(Math.Max(1, _options.BulkVerification.WorkerPollSeconds));
while (!stoppingToken.IsCancellationRequested)
{
try
{
var job = await _jobStore.TryAcquireAsync(stoppingToken).ConfigureAwait(false);
if (job is null)
{
await Task.Delay(pollDelay, stoppingToken).ConfigureAwait(false);
continue;
}
await ProcessJobAsync(job, stoppingToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
{
break;
}
catch (Exception ex)
{
_logger.LogError(ex, "Bulk verification worker loop failed.");
await Task.Delay(pollDelay, stoppingToken).ConfigureAwait(false);
}
}
}
internal async Task ProcessJobAsync(BulkVerificationJob job, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(job);
_logger.LogInformation("Processing bulk verification job {JobId} with {ItemCount} items.", job.Id, job.Items.Count);
job.StartedAt ??= _timeProvider.GetUtcNow();
if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false))
{
_logger.LogWarning("Failed to persist initial state for job {JobId}.", job.Id);
}
var itemDelay = _options.BulkVerification.ItemDelayMilliseconds > 0
? TimeSpan.FromMilliseconds(_options.BulkVerification.ItemDelayMilliseconds)
: TimeSpan.Zero;
foreach (var item in job.Items.OrderBy(i => i.Index))
{
cancellationToken.ThrowIfCancellationRequested();
if (item.Status is not BulkVerificationItemStatus.Pending)
{
continue;
}
await ExecuteItemAsync(job, item, cancellationToken).ConfigureAwait(false);
if (itemDelay > TimeSpan.Zero)
{
try
{
await Task.Delay(itemDelay, cancellationToken).ConfigureAwait(false);
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
break;
}
}
}
job.CompletedAt = _timeProvider.GetUtcNow();
job.Status = job.FailureReason is null ? BulkVerificationJobStatus.Completed : BulkVerificationJobStatus.Failed;
if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false))
{
_logger.LogWarning("Failed to persist completion state for job {JobId}.", job.Id);
}
var durationSeconds = (job.CompletedAt - job.StartedAt)?.TotalSeconds ?? 0;
var statusTag = job.Status == BulkVerificationJobStatus.Completed && job.FailedCount == 0 ? "succeeded" :
job.Status == BulkVerificationJobStatus.Completed ? "completed_with_failures" : "failed";
_metrics.BulkJobsTotal.Add(1, new KeyValuePair<string, object?>("status", statusTag));
_metrics.BulkJobDuration.Record(durationSeconds, new KeyValuePair<string, object?>("status", statusTag));
_logger.LogInformation("Finished bulk verification job {JobId}. Ran {Processed} items (success: {Success}, failed: {Failed}).",
job.Id, job.ProcessedCount, job.SucceededCount, job.FailedCount);
}
private async Task ExecuteItemAsync(BulkVerificationJob job, BulkVerificationJobItem item, CancellationToken cancellationToken)
{
item.Status = BulkVerificationItemStatus.Running;
item.StartedAt = _timeProvider.GetUtcNow();
await PersistAsync(job, cancellationToken).ConfigureAwait(false);
var statusTag = "failed";
try
{
var request = new AttestorVerificationRequest
{
Uuid = item.Request.Uuid,
ArtifactSha256 = item.Request.ArtifactSha256,
Subject = item.Request.Subject,
EnvelopeId = item.Request.EnvelopeId,
PolicyVersion = item.Request.PolicyVersion,
RefreshProof = item.Request.RefreshProof
};
var result = await _verificationService.VerifyAsync(request, cancellationToken).ConfigureAwait(false);
item.Result = result;
item.CompletedAt = _timeProvider.GetUtcNow();
item.Status = result.Ok ? BulkVerificationItemStatus.Succeeded : BulkVerificationItemStatus.Failed;
statusTag = item.Status == BulkVerificationItemStatus.Succeeded ? "succeeded" : "verification_failed";
job.ProcessedCount++;
if (item.Status == BulkVerificationItemStatus.Succeeded)
{
job.SucceededCount++;
}
else
{
job.FailedCount++;
}
}
catch (AttestorVerificationException verificationEx)
{
item.CompletedAt = _timeProvider.GetUtcNow();
item.Status = BulkVerificationItemStatus.Failed;
item.Error = $"{verificationEx.Code}:{verificationEx.Message}";
job.ProcessedCount++;
job.FailedCount++;
job.FailureReason ??= "item_failure";
statusTag = "verification_error";
}
catch (Exception ex)
{
item.CompletedAt = _timeProvider.GetUtcNow();
item.Status = BulkVerificationItemStatus.Failed;
item.Error = ex.Message;
job.ProcessedCount++;
job.FailedCount++;
job.FailureReason ??= "worker_exception";
_logger.LogError(ex, "Bulk verification item {ItemIndex} failed for job {JobId}.", item.Index, job.Id);
statusTag = "exception";
}
if (!await PersistAsync(job, cancellationToken).ConfigureAwait(false))
{
_logger.LogWarning("Failed to persist progress for job {JobId} item {ItemIndex}.", job.Id, item.Index);
}
_metrics.BulkItemsTotal.Add(1, new KeyValuePair<string, object?>("status", statusTag));
}
private async Task<bool> PersistAsync(BulkVerificationJob job, CancellationToken cancellationToken)
{
for (var attempt = 0; attempt < 3; attempt++)
{
if (await _jobStore.TryUpdateAsync(job, cancellationToken).ConfigureAwait(false))
{
return true;
}
var refreshed = await _jobStore.GetAsync(job.Id, cancellationToken).ConfigureAwait(false);
if (refreshed is null)
{
return false;
}
Synchronize(job, refreshed);
}
return false;
}
private static void Synchronize(BulkVerificationJob target, BulkVerificationJob source)
{
target.Version = source.Version;
target.Status = source.Status;
target.CreatedAt = source.CreatedAt;
target.StartedAt = source.StartedAt;
target.CompletedAt = source.CompletedAt;
target.ProcessedCount = source.ProcessedCount;
target.SucceededCount = source.SucceededCount;
target.FailedCount = source.FailedCount;
target.FailureReason = source.FailureReason;
var sourceItems = source.Items.ToDictionary(i => i.Index);
foreach (var item in target.Items)
{
if (sourceItems.TryGetValue(item.Index, out var updated))
{
item.Status = updated.Status;
item.StartedAt = updated.StartedAt;
item.CompletedAt = updated.CompletedAt;
item.Result = updated.Result;
item.Error = updated.Error;
}
}
}
}

View File

@@ -0,0 +1,343 @@
using System;
using System.Collections.Generic;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Bulk;
using StellaOps.Attestor.Core.Verification;
namespace StellaOps.Attestor.Infrastructure.Bulk;
internal sealed class MongoBulkVerificationJobStore : IBulkVerificationJobStore
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web);
private readonly IMongoCollection<JobDocument> _collection;
public MongoBulkVerificationJobStore(IMongoCollection<JobDocument> collection)
{
_collection = collection ?? throw new ArgumentNullException(nameof(collection));
}
public async Task<BulkVerificationJob> CreateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
job.Version = 0;
var document = JobDocument.FromDomain(job, SerializerOptions);
await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false);
job.Version = document.Version;
return job;
}
public async Task<BulkVerificationJob?> GetAsync(string jobId, CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(jobId))
{
return null;
}
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Id, jobId);
var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain(SerializerOptions);
}
public async Task<BulkVerificationJob?> TryAcquireAsync(CancellationToken cancellationToken = default)
{
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued);
var update = Builders<JobDocument>.Update
.Set(doc => doc.Status, BulkVerificationJobStatus.Running)
.Set(doc => doc.StartedAt, DateTimeOffset.UtcNow.UtcDateTime)
.Inc(doc => doc.Version, 1);
var options = new FindOneAndUpdateOptions<JobDocument>
{
Sort = Builders<JobDocument>.Sort.Ascending(doc => doc.CreatedAt),
ReturnDocument = ReturnDocument.After
};
var document = await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false);
return document?.ToDomain(SerializerOptions);
}
public async Task<bool> TryUpdateAsync(BulkVerificationJob job, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(job);
var currentVersion = job.Version;
var replacement = JobDocument.FromDomain(job, SerializerOptions);
replacement.Version = currentVersion + 1;
var filter = Builders<JobDocument>.Filter.Where(doc => doc.Id == job.Id && doc.Version == currentVersion);
var result = await _collection.ReplaceOneAsync(filter, replacement, cancellationToken: cancellationToken).ConfigureAwait(false);
if (result.ModifiedCount == 0)
{
return false;
}
job.Version = replacement.Version;
return true;
}
public async Task<int> CountQueuedAsync(CancellationToken cancellationToken = default)
{
var filter = Builders<JobDocument>.Filter.Eq(doc => doc.Status, BulkVerificationJobStatus.Queued);
var count = await _collection.CountDocumentsAsync(filter, cancellationToken: cancellationToken).ConfigureAwait(false);
return Convert.ToInt32(count);
}
internal sealed class JobDocument
{
[BsonId]
[BsonElement("_id")]
public string Id { get; set; } = string.Empty;
[BsonElement("version")]
public int Version { get; set; }
[BsonElement("status")]
[BsonRepresentation(BsonType.String)]
public BulkVerificationJobStatus Status { get; set; }
[BsonElement("createdAt")]
public DateTime CreatedAt { get; set; }
[BsonElement("startedAt")]
[BsonIgnoreIfNull]
public DateTime? StartedAt { get; set; }
[BsonElement("completedAt")]
[BsonIgnoreIfNull]
public DateTime? CompletedAt { get; set; }
[BsonElement("context")]
public JobContextDocument Context { get; set; } = new();
[BsonElement("items")]
public List<JobItemDocument> Items { get; set; } = new();
[BsonElement("processed")]
public int ProcessedCount { get; set; }
[BsonElement("succeeded")]
public int SucceededCount { get; set; }
[BsonElement("failed")]
public int FailedCount { get; set; }
[BsonElement("failureReason")]
[BsonIgnoreIfNull]
public string? FailureReason { get; set; }
public static JobDocument FromDomain(BulkVerificationJob job, JsonSerializerOptions serializerOptions)
{
return new JobDocument
{
Id = job.Id,
Version = job.Version,
Status = job.Status,
CreatedAt = job.CreatedAt.UtcDateTime,
StartedAt = job.StartedAt?.UtcDateTime,
CompletedAt = job.CompletedAt?.UtcDateTime,
Context = JobContextDocument.FromDomain(job.Context),
Items = JobItemDocument.FromDomain(job.Items, serializerOptions),
ProcessedCount = job.ProcessedCount,
SucceededCount = job.SucceededCount,
FailedCount = job.FailedCount,
FailureReason = job.FailureReason
};
}
public BulkVerificationJob ToDomain(JsonSerializerOptions serializerOptions)
{
return new BulkVerificationJob
{
Id = Id,
Version = Version,
Status = Status,
CreatedAt = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc),
StartedAt = StartedAt is null ? null : DateTime.SpecifyKind(StartedAt.Value, DateTimeKind.Utc),
CompletedAt = CompletedAt is null ? null : DateTime.SpecifyKind(CompletedAt.Value, DateTimeKind.Utc),
Context = Context.ToDomain(),
Items = JobItemDocument.ToDomain(Items, serializerOptions),
ProcessedCount = ProcessedCount,
SucceededCount = SucceededCount,
FailedCount = FailedCount,
FailureReason = FailureReason
};
}
}
internal sealed class JobContextDocument
{
[BsonElement("tenant")]
[BsonIgnoreIfNull]
public string? Tenant { get; set; }
[BsonElement("requestedBy")]
[BsonIgnoreIfNull]
public string? RequestedBy { get; set; }
[BsonElement("clientId")]
[BsonIgnoreIfNull]
public string? ClientId { get; set; }
[BsonElement("scopes")]
public List<string> Scopes { get; set; } = new();
public static JobContextDocument FromDomain(BulkVerificationJobContext context)
{
return new JobContextDocument
{
Tenant = context.Tenant,
RequestedBy = context.RequestedBy,
ClientId = context.ClientId,
Scopes = new List<string>(context.Scopes)
};
}
public BulkVerificationJobContext ToDomain()
{
return new BulkVerificationJobContext
{
Tenant = Tenant,
RequestedBy = RequestedBy,
ClientId = ClientId,
Scopes = new List<string>(Scopes ?? new List<string>())
};
}
}
internal sealed class JobItemDocument
{
[BsonElement("index")]
public int Index { get; set; }
[BsonElement("request")]
public ItemRequestDocument Request { get; set; } = new();
[BsonElement("status")]
[BsonRepresentation(BsonType.String)]
public BulkVerificationItemStatus Status { get; set; }
[BsonElement("startedAt")]
[BsonIgnoreIfNull]
public DateTime? StartedAt { get; set; }
[BsonElement("completedAt")]
[BsonIgnoreIfNull]
public DateTime? CompletedAt { get; set; }
[BsonElement("result")]
[BsonIgnoreIfNull]
public string? ResultJson { get; set; }
[BsonElement("error")]
[BsonIgnoreIfNull]
public string? Error { get; set; }
public static List<JobItemDocument> FromDomain(IEnumerable<BulkVerificationJobItem> items, JsonSerializerOptions serializerOptions)
{
var list = new List<JobItemDocument>();
foreach (var item in items)
{
list.Add(new JobItemDocument
{
Index = item.Index,
Request = ItemRequestDocument.FromDomain(item.Request),
Status = item.Status,
StartedAt = item.StartedAt?.UtcDateTime,
CompletedAt = item.CompletedAt?.UtcDateTime,
ResultJson = item.Result is null ? null : JsonSerializer.Serialize(item.Result, serializerOptions),
Error = item.Error
});
}
return list;
}
public static IList<BulkVerificationJobItem> ToDomain(IEnumerable<JobItemDocument> documents, JsonSerializerOptions serializerOptions)
{
var list = new List<BulkVerificationJobItem>();
foreach (var document in documents)
{
AttestorVerificationResult? result = null;
if (!string.IsNullOrWhiteSpace(document.ResultJson))
{
result = JsonSerializer.Deserialize<AttestorVerificationResult>(document.ResultJson, serializerOptions);
}
list.Add(new BulkVerificationJobItem
{
Index = document.Index,
Request = document.Request.ToDomain(),
Status = document.Status,
StartedAt = document.StartedAt is null ? null : DateTime.SpecifyKind(document.StartedAt.Value, DateTimeKind.Utc),
CompletedAt = document.CompletedAt is null ? null : DateTime.SpecifyKind(document.CompletedAt.Value, DateTimeKind.Utc),
Result = result,
Error = document.Error
});
}
return list;
}
}
internal sealed class ItemRequestDocument
{
[BsonElement("uuid")]
[BsonIgnoreIfNull]
public string? Uuid { get; set; }
[BsonElement("artifactSha256")]
[BsonIgnoreIfNull]
public string? ArtifactSha256 { get; set; }
[BsonElement("subject")]
[BsonIgnoreIfNull]
public string? Subject { get; set; }
[BsonElement("envelopeId")]
[BsonIgnoreIfNull]
public string? EnvelopeId { get; set; }
[BsonElement("policyVersion")]
[BsonIgnoreIfNull]
public string? PolicyVersion { get; set; }
[BsonElement("refreshProof")]
public bool RefreshProof { get; set; }
public static ItemRequestDocument FromDomain(BulkVerificationItemRequest request)
{
return new ItemRequestDocument
{
Uuid = request.Uuid,
ArtifactSha256 = request.ArtifactSha256,
Subject = request.Subject,
EnvelopeId = request.EnvelopeId,
PolicyVersion = request.PolicyVersion,
RefreshProof = request.RefreshProof
};
}
public BulkVerificationItemRequest ToDomain()
{
return new BulkVerificationItemRequest
{
Uuid = Uuid,
ArtifactSha256 = ArtifactSha256,
Subject = Subject,
EnvelopeId = EnvelopeId,
PolicyVersion = PolicyVersion,
RefreshProof = RefreshProof
};
}
}
}

View File

@@ -0,0 +1,269 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.Core.Offline;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Infrastructure.Storage;
namespace StellaOps.Attestor.Infrastructure.Offline;
internal sealed class AttestorBundleService : IAttestorBundleService
{
private readonly IAttestorEntryRepository _repository;
private readonly IAttestorArchiveStore _archiveStore;
private readonly TimeProvider _timeProvider;
private readonly AttestorOptions _options;
private readonly ILogger<AttestorBundleService> _logger;
public AttestorBundleService(
IAttestorEntryRepository repository,
IAttestorArchiveStore archiveStore,
TimeProvider timeProvider,
IOptions<AttestorOptions> options,
ILogger<AttestorBundleService> logger)
{
_repository = repository;
_archiveStore = archiveStore;
_timeProvider = timeProvider;
_options = options.Value;
_logger = logger;
}
public async Task<AttestorBundlePackage> ExportAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var (entries, continuationToken) = await ResolveEntriesAsync(request, cancellationToken).ConfigureAwait(false);
var items = new List<AttestorBundleItem>(entries.Count);
foreach (var entry in entries
.OrderBy(e => e.CreatedAt)
.ThenBy(e => e.RekorUuid, StringComparer.Ordinal))
{
var archiveBundle = await _archiveStore.GetBundleAsync(entry.BundleSha256, entry.RekorUuid, cancellationToken).ConfigureAwait(false);
if (archiveBundle is null)
{
_logger.LogWarning("Archive bundle for {Uuid} ({BundleSha}) unavailable; exporting metadata only.", entry.RekorUuid, entry.BundleSha256);
items.Add(new AttestorBundleItem
{
Entry = entry,
CanonicalBundle = string.Empty,
Metadata = new Dictionary<string, string>
{
["archive.missing"] = "true"
}
});
continue;
}
var metadata = archiveBundle.Metadata ?? new Dictionary<string, string>();
if (!metadata.ContainsKey("logUrl"))
{
metadata = new Dictionary<string, string>(metadata)
{
["logUrl"] = entry.Log.Url
};
}
items.Add(new AttestorBundleItem
{
Entry = entry,
CanonicalBundle = Convert.ToBase64String(archiveBundle.CanonicalBundleJson),
Proof = archiveBundle.ProofJson.Length > 0 ? Convert.ToBase64String(archiveBundle.ProofJson) : null,
Metadata = metadata
});
}
return new AttestorBundlePackage
{
Version = AttestorBundleVersions.Current,
GeneratedAt = _timeProvider.GetUtcNow(),
Items = items,
ContinuationToken = continuationToken
};
}
public async Task<AttestorBundleImportResult> ImportAsync(AttestorBundlePackage package, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(package);
if (!_options.S3.Enabled || _archiveStore is NullAttestorArchiveStore)
{
var skippedCount = package.Items?.Count ?? 0;
_logger.LogWarning("Attestor archive store disabled; skipping import for {Count} bundle item(s).", skippedCount);
return new AttestorBundleImportResult
{
Imported = 0,
Updated = 0,
Skipped = skippedCount,
Issues = new[] { "archive_disabled" }
};
}
if (package.Items is null || package.Items.Count == 0)
{
return new AttestorBundleImportResult
{
Imported = 0,
Updated = 0,
Skipped = 0,
Issues = Array.Empty<string>()
};
}
var imported = 0;
var updated = 0;
var skipped = 0;
var issues = new List<string>();
foreach (var item in package.Items)
{
if (item.Entry is null)
{
skipped++;
issues.Add("entry_missing");
continue;
}
if (string.IsNullOrWhiteSpace(item.Entry.RekorUuid))
{
skipped++;
issues.Add("uuid_missing");
continue;
}
if (string.IsNullOrWhiteSpace(item.Entry.BundleSha256))
{
skipped++;
issues.Add($"bundle_sha_missing:{item.Entry.RekorUuid}");
continue;
}
if (string.IsNullOrWhiteSpace(item.CanonicalBundle))
{
skipped++;
issues.Add($"bundle_payload_missing:{item.Entry.RekorUuid}");
continue;
}
byte[] canonicalBytes;
try
{
canonicalBytes = Convert.FromBase64String(item.CanonicalBundle);
}
catch (FormatException)
{
skipped++;
issues.Add($"bundle_payload_invalid_base64:{item.Entry.RekorUuid}");
continue;
}
var computedSha = Convert.ToHexString(SHA256.HashData(canonicalBytes)).ToLowerInvariant();
if (!string.Equals(computedSha, item.Entry.BundleSha256, StringComparison.OrdinalIgnoreCase))
{
skipped++;
issues.Add($"bundle_hash_mismatch:{item.Entry.RekorUuid}");
continue;
}
byte[] proofBytes = Array.Empty<byte>();
if (!string.IsNullOrEmpty(item.Proof))
{
try
{
proofBytes = Convert.FromBase64String(item.Proof);
}
catch (FormatException)
{
issues.Add($"proof_invalid_base64:{item.Entry.RekorUuid}");
}
}
var archiveBundle = new AttestorArchiveBundle
{
RekorUuid = item.Entry.RekorUuid,
ArtifactSha256 = item.Entry.Artifact.Sha256,
BundleSha256 = item.Entry.BundleSha256,
CanonicalBundleJson = canonicalBytes,
ProofJson = proofBytes,
Metadata = item.Metadata ?? new Dictionary<string, string>()
};
await _archiveStore.ArchiveBundleAsync(archiveBundle, cancellationToken).ConfigureAwait(false);
var existing = await _repository.GetByUuidAsync(item.Entry.RekorUuid, cancellationToken).ConfigureAwait(false);
if (existing is null)
{
imported++;
}
else
{
updated++;
}
await _repository.SaveAsync(item.Entry, cancellationToken).ConfigureAwait(false);
}
return new AttestorBundleImportResult
{
Imported = imported,
Updated = updated,
Skipped = skipped,
Issues = issues
};
}
private async Task<(List<AttestorEntry> Entries, string? ContinuationToken)> ResolveEntriesAsync(AttestorBundleExportRequest request, CancellationToken cancellationToken)
{
var entries = new List<AttestorEntry>();
if (request.Uuids is { Count: > 0 })
{
foreach (var uuid in request.Uuids.Where(u => !string.IsNullOrWhiteSpace(u)).Distinct(StringComparer.OrdinalIgnoreCase))
{
var entry = await _repository.GetByUuidAsync(uuid, cancellationToken).ConfigureAwait(false);
if (entry is null)
{
_logger.LogWarning("Attestation {Uuid} not found; skipping export entry.", uuid);
continue;
}
entries.Add(entry);
}
return (entries, null);
}
var limit = request.Limit.HasValue
? Math.Clamp(request.Limit.Value, 1, 200)
: 100;
var query = new AttestorEntryQuery
{
Subject = request.Subject,
Type = request.Type,
Issuer = request.Issuer,
Scope = request.Scope,
CreatedAfter = request.CreatedAfter,
CreatedBefore = request.CreatedBefore,
PageSize = limit,
ContinuationToken = request.ContinuationToken
};
var result = await _repository.QueryAsync(query, cancellationToken).ConfigureAwait(false);
if (result.Items.Count == 0)
{
_logger.LogInformation("No attestor entries matched export query.");
}
entries.AddRange(result.Items.Take(limit));
return (entries, result.ContinuationToken);
}
}
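
Illustrative usage (not part of this change): a minimal sketch of feeding an exported package back through ImportAsync on a receiving instance. The bundleService, exportedEntry, canonicalJson, and cancellationToken names are placeholders; only members visible in the code above are used, and exportedEntry is assumed to carry the RekorUuid, BundleSha256, and Artifact data captured during export.

// Illustrative sketch only; bundleService stands for the service shown above, resolved by the host.
var package = new AttestorBundlePackage
{
    Version = AttestorBundleVersions.Current,
    GeneratedAt = DateTimeOffset.UtcNow,
    Items = new List<AttestorBundleItem>
    {
        new AttestorBundleItem
        {
            Entry = exportedEntry,
            // Entry.BundleSha256 must equal SHA-256(canonicalJson); otherwise the import
            // skips the item with "bundle_hash_mismatch:<uuid>".
            CanonicalBundle = Convert.ToBase64String(canonicalJson),
            Metadata = new Dictionary<string, string> { ["logUrl"] = "https://rekor.example/api" }
        }
    }
};
var result = await bundleService.ImportAsync(package, cancellationToken);
// result.Imported / result.Updated / result.Skipped and result.Issues mirror the
// per-item outcome codes emitted in ImportAsync above.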

View File

@@ -1,21 +1,24 @@
using System;
using Amazon.Runtime;
using Amazon.S3;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StackExchange.Redis;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.Attestor.Infrastructure.Storage;
using StellaOps.Attestor.Infrastructure.Submission;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure.Verification;
using Amazon.Runtime;
using Amazon.S3;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StackExchange.Redis;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Rekor;
using StellaOps.Attestor.Core.Storage;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Attestor.Core.Transparency;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Infrastructure.Rekor;
using StellaOps.Attestor.Infrastructure.Storage;
using StellaOps.Attestor.Infrastructure.Submission;
using StellaOps.Attestor.Infrastructure.Transparency;
using StellaOps.Attestor.Infrastructure.Verification;
namespace StellaOps.Attestor.Infrastructure;
@@ -23,7 +26,9 @@ public static class ServiceCollectionExtensions
{
public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
{
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddMemoryCache();
services.AddSingleton<IDsseCanonicalizer, DefaultDsseCanonicalizer>();
services.AddSingleton(sp =>
{
var canonicalizer = sp.GetRequiredService<IDsseCanonicalizer>();
@@ -33,11 +38,34 @@ public static class ServiceCollectionExtensions
services.AddSingleton<AttestorMetrics>();
services.AddSingleton<IAttestorSubmissionService, AttestorSubmissionService>();
services.AddSingleton<IAttestorVerificationService, AttestorVerificationService>();
services.AddHttpClient<HttpRekorClient>(client =>
{
client.Timeout = TimeSpan.FromSeconds(30);
});
services.AddSingleton<IRekorClient>(sp => sp.GetRequiredService<HttpRekorClient>());
services.AddHttpClient<HttpRekorClient>(client =>
{
client.Timeout = TimeSpan.FromSeconds(30);
});
services.AddSingleton<IRekorClient>(sp => sp.GetRequiredService<HttpRekorClient>());
services.AddHttpClient<HttpTransparencyWitnessClient>((sp, client) =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
var timeoutMs = options.TransparencyWitness.RequestTimeoutMs;
if (timeoutMs <= 0)
{
timeoutMs = 15_000;
}
client.Timeout = TimeSpan.FromMilliseconds(timeoutMs);
});
services.AddSingleton<ITransparencyWitnessClient>(sp =>
{
var options = sp.GetRequiredService<IOptions<AttestorOptions>>().Value;
if (!options.TransparencyWitness.Enabled || string.IsNullOrWhiteSpace(options.TransparencyWitness.BaseUrl))
{
return new NullTransparencyWitnessClient();
}
return sp.GetRequiredService<HttpTransparencyWitnessClient>();
});
services.AddSingleton<IMongoClient>(sp =>
{

View File

@@ -0,0 +1,347 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Org.BouncyCastle.Crypto.Parameters;
using StellaOps.Attestor.Core.Options;
using StellaOps.Attestor.Core.Signing;
using StellaOps.Cryptography;
using StellaOps.Cryptography.Kms;
using StellaOps.Cryptography.Plugin.BouncyCastle;
namespace StellaOps.Attestor.Infrastructure.Signing;
internal sealed class AttestorSigningKeyRegistry : IDisposable
{
private readonly Dictionary<string, SigningKeyEntry> _keys;
private readonly FileKmsClient? _kmsClient;
private readonly ILogger<AttestorSigningKeyRegistry> _logger;
public AttestorSigningKeyRegistry(
IOptions<AttestorOptions> options,
TimeProvider timeProvider,
ILogger<AttestorSigningKeyRegistry> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
var attestorOptions = options?.Value ?? new AttestorOptions();
var signingOptions = attestorOptions.Signing ?? new AttestorOptions.SigningOptions();
var providers = new List<ICryptoProvider>();
var providerMap = new Dictionary<string, ICryptoProvider>(StringComparer.OrdinalIgnoreCase);
void RegisterProvider(ICryptoProvider provider)
{
providers.Add(provider);
providerMap[provider.Name] = provider;
}
var defaultProvider = new DefaultCryptoProvider();
RegisterProvider(defaultProvider);
var edProvider = new BouncyCastleEd25519CryptoProvider();
RegisterProvider(edProvider);
KmsCryptoProvider? kmsProvider = null;
if (RequiresKms(signingOptions))
{
var kmsOptions = signingOptions.Kms ?? throw new InvalidOperationException("attestor.signing.kms is required when a signing key declares mode 'kms'.");
if (string.IsNullOrWhiteSpace(kmsOptions.RootPath))
{
throw new InvalidOperationException("attestor.signing.kms.rootPath must be provided when using KMS-backed signing keys.");
}
if (string.IsNullOrWhiteSpace(kmsOptions.Password))
{
throw new InvalidOperationException("attestor.signing.kms.password must be provided when using KMS-backed signing keys.");
}
var fileOptions = new FileKmsOptions
{
RootPath = Path.GetFullPath(kmsOptions.RootPath!),
Password = kmsOptions.Password!,
Algorithm = kmsOptions.Algorithm,
KeyDerivationIterations = kmsOptions.KeyDerivationIterations ?? 600_000
};
_kmsClient = new FileKmsClient(fileOptions);
kmsProvider = new KmsCryptoProvider(_kmsClient);
RegisterProvider(kmsProvider);
}
Registry = new CryptoProviderRegistry(providers, signingOptions.PreferredProviders);
_keys = new Dictionary<string, SigningKeyEntry>(StringComparer.OrdinalIgnoreCase);
foreach (var key in signingOptions.Keys ?? Array.Empty<AttestorOptions.SigningKeyOptions>())
{
if (key is null || !key.Enabled)
{
continue;
}
var entry = CreateEntry(
key,
providerMap,
defaultProvider,
edProvider,
kmsProvider,
_kmsClient,
timeProvider);
if (_keys.ContainsKey(entry.KeyId))
{
throw new InvalidOperationException($"Duplicate signing key id '{entry.KeyId}' configured.");
}
_keys[entry.KeyId] = entry;
_logger.LogInformation("Registered attestor signing key {KeyId} using provider {Provider} and algorithm {Algorithm}.", entry.KeyId, entry.ProviderName, entry.Algorithm);
}
}
public ICryptoProviderRegistry Registry { get; }
public SigningKeyEntry GetRequired(string keyId)
{
if (string.IsNullOrWhiteSpace(keyId))
{
throw new AttestorSigningException("key_missing", "Signing key id must be provided.");
}
if (_keys.TryGetValue(keyId, out var entry))
{
return entry;
}
throw new AttestorSigningException("key_not_found", $"Signing key '{keyId}' is not configured.");
}
public void Dispose()
{
_kmsClient?.Dispose();
}
private static bool RequiresKms(AttestorOptions.SigningOptions signingOptions)
=> signingOptions.Keys?.Any(static key =>
string.Equals(key?.Mode, "kms", StringComparison.OrdinalIgnoreCase)) == true;
private SigningKeyEntry CreateEntry(
AttestorOptions.SigningKeyOptions key,
IReadOnlyDictionary<string, ICryptoProvider> providers,
DefaultCryptoProvider defaultProvider,
BouncyCastleEd25519CryptoProvider edProvider,
KmsCryptoProvider? kmsProvider,
FileKmsClient? kmsClient,
TimeProvider timeProvider)
{
var providerName = ResolveProviderName(key);
if (!providers.TryGetValue(providerName, out var provider))
{
throw new InvalidOperationException($"Signing provider '{providerName}' is not registered for key '{key.KeyId}'.");
}
var providerKeyId = string.IsNullOrWhiteSpace(key.ProviderKeyId) ? key.KeyId : key.ProviderKeyId!;
if (string.IsNullOrWhiteSpace(providerKeyId))
{
throw new InvalidOperationException($"Signing key '{key.KeyId}' must specify a provider key identifier.");
}
var now = timeProvider.GetUtcNow();
var normalizedAlgorithm = NormalizeAlgorithm(key.Algorithm ?? string.Empty);
if (string.Equals(providerName, "kms", StringComparison.OrdinalIgnoreCase))
{
if (kmsProvider is null || kmsClient is null)
{
throw new InvalidOperationException($"KMS signing provider is not configured but signing key '{key.KeyId}' requests mode 'kms'.");
}
var versionId = key.KmsVersionId;
if (string.IsNullOrWhiteSpace(versionId))
{
throw new InvalidOperationException($"Signing key '{key.KeyId}' must specify kmsVersionId when using mode 'kms'.");
}
var material = kmsClient.ExportAsync(providerKeyId, versionId, default).GetAwaiter().GetResult();
var parameters = new ECParameters
{
Curve = ECCurve.NamedCurves.nistP256,
D = material.D,
Q = new ECPoint
{
X = material.Qx,
Y = material.Qy
}
};
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["kms.version"] = material.VersionId
};
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
in parameters,
now,
expiresAt: null,
metadata: metadata);
kmsProvider.UpsertSigningKey(signingKey);
}
else if (string.Equals(providerName, "bouncycastle.ed25519", StringComparison.OrdinalIgnoreCase))
{
var privateKeyBytes = LoadPrivateKeyBytes(key);
var privateKeyParameters = new Ed25519PrivateKeyParameters(privateKeyBytes, 0);
var publicKeyBytes = privateKeyParameters.GeneratePublicKey().GetEncoded();
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
privateKeyBytes,
now,
publicKey: publicKeyBytes);
edProvider.UpsertSigningKey(signingKey);
}
else
{
var parameters = LoadEcParameters(key);
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,
in parameters,
now);
defaultProvider.UpsertSigningKey(signingKey);
}
var mode = string.IsNullOrWhiteSpace(key.Mode)
? (string.Equals(providerName, "kms", StringComparison.OrdinalIgnoreCase) ? "kms" : "keyful")
: key.Mode!;
var certificateChain = key.CertificateChain?.Count > 0
? key.CertificateChain.ToArray()
: Array.Empty<string>();
return new SigningKeyEntry(
key.KeyId,
providerKeyId,
providerName,
normalizedAlgorithm,
mode,
certificateChain);
}
private static string ResolveProviderName(AttestorOptions.SigningKeyOptions key)
{
if (!string.IsNullOrWhiteSpace(key.Provider))
{
return key.Provider!;
}
if (string.Equals(key.Mode, "kms", StringComparison.OrdinalIgnoreCase))
{
return "kms";
}
if (string.Equals(key.Algorithm, SignatureAlgorithms.Ed25519, StringComparison.OrdinalIgnoreCase) ||
string.Equals(key.Algorithm, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase))
{
return "bouncycastle.ed25519";
}
return "default";
}
private static string NormalizeAlgorithm(string algorithm)
{
if (string.IsNullOrWhiteSpace(algorithm))
{
return SignatureAlgorithms.Es256;
}
if (string.Equals(algorithm, SignatureAlgorithms.EdDsa, StringComparison.OrdinalIgnoreCase))
{
return SignatureAlgorithms.Ed25519;
}
return algorithm.ToUpperInvariant();
}
private static byte[] LoadPrivateKeyBytes(AttestorOptions.SigningKeyOptions key)
{
var material = ReadMaterial(key);
return key.MaterialFormat?.ToLowerInvariant() switch
{
"base64" or null => Convert.FromBase64String(material),
"hex" => Convert.FromHexString(material),
_ => throw new InvalidOperationException($"Unsupported materialFormat '{key.MaterialFormat}' for Ed25519 signing key '{key.KeyId}'. Supported formats: base64, hex.")
};
}
private static ECParameters LoadEcParameters(AttestorOptions.SigningKeyOptions key)
{
var material = ReadMaterial(key);
using var ecdsa = ECDsa.Create();
switch (key.MaterialFormat?.ToLowerInvariant())
{
case null:
case "pem":
ecdsa.ImportFromPem(material);
break;
case "base64":
{
var pkcs8 = Convert.FromBase64String(material);
ecdsa.ImportPkcs8PrivateKey(pkcs8, out _);
break;
}
case "hex":
{
var pkcs8 = Convert.FromHexString(material);
ecdsa.ImportPkcs8PrivateKey(pkcs8, out _);
break;
}
default:
throw new InvalidOperationException($"Unsupported materialFormat '{key.MaterialFormat}' for signing key '{key.KeyId}'. Supported formats: pem, base64, hex.");
}
return ecdsa.ExportParameters(true);
}
private static string ReadMaterial(AttestorOptions.SigningKeyOptions key)
{
if (!string.IsNullOrWhiteSpace(key.MaterialPassphrase))
{
throw new InvalidOperationException($"Signing key '{key.KeyId}' specifies a materialPassphrase but encrypted keys are not yet supported.");
}
if (!string.IsNullOrWhiteSpace(key.Material))
{
return key.Material.Trim();
}
if (!string.IsNullOrWhiteSpace(key.MaterialPath))
{
var path = Path.GetFullPath(key.MaterialPath);
if (!File.Exists(path))
{
throw new InvalidOperationException($"Signing key material file '{path}' for key '{key.KeyId}' does not exist.");
}
return File.ReadAllText(path).Trim();
}
throw new InvalidOperationException($"Signing key '{key.KeyId}' must provide either inline material or a materialPath.");
}
internal sealed record SigningKeyEntry(
string KeyId,
string ProviderKeyId,
string ProviderName,
string Algorithm,
string Mode,
IReadOnlyList<string> CertificateChain);
}
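
Illustrative usage (not part of this change): a minimal sketch of resolving a configured key, assuming the registry is registered as a singleton alongside the other infrastructure services. The key id "attestor-es256" is hypothetical, and only members declared above are referenced.

// Illustrative sketch only; registry is assumed to come from dependency injection.
try
{
    var entry = registry.GetRequired("attestor-es256"); // hypothetical id from attestor.signing.keys
    // SigningKeyEntry exposes KeyId, ProviderKeyId, ProviderName, Algorithm, Mode and CertificateChain.
    Console.WriteLine($"{entry.KeyId}: {entry.Algorithm} via {entry.ProviderName} ({entry.Mode})");
}
catch (AttestorSigningException)
{
    // Raised with "key_missing" for a blank id or "key_not_found" when the id is not configured.
}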

View File

@@ -0,0 +1,260 @@
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Core.Audit;
using StellaOps.Attestor.Core.Observability;
using StellaOps.Attestor.Core.Signing;
using StellaOps.Attestor.Core.Submission;
using StellaOps.Cryptography;
namespace StellaOps.Attestor.Infrastructure.Signing;
internal sealed class AttestorSigningService : IAttestationSigningService
{
private readonly AttestorSigningKeyRegistry _registry;
private readonly IDsseCanonicalizer _canonicalizer;
private readonly StellaOps.Attestor.Core.Storage.IAttestorAuditSink _auditSink;
private readonly AttestorMetrics _metrics;
private readonly ILogger<AttestorSigningService> _logger;
private readonly TimeProvider _timeProvider;
public AttestorSigningService(
AttestorSigningKeyRegistry registry,
IDsseCanonicalizer canonicalizer,
StellaOps.Attestor.Core.Storage.IAttestorAuditSink auditSink,
AttestorMetrics metrics,
ILogger<AttestorSigningService> logger,
TimeProvider timeProvider)
{
_registry = registry ?? throw new ArgumentNullException(nameof(registry));
_canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
_auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink));
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<AttestationSignResult> SignAsync(
AttestationSignRequest request,
SubmissionContext context,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(context);
var stopwatch = Stopwatch.StartNew();
try
{
if (string.IsNullOrWhiteSpace(request.KeyId))
{
throw new AttestorSigningException("key_missing", "Signing key id is required.");
}
if (string.IsNullOrWhiteSpace(request.PayloadType))
{
throw new AttestorSigningException("payload_type_missing", "payloadType must be provided.");
}
if (string.IsNullOrWhiteSpace(request.PayloadBase64))
{
throw new AttestorSigningException("payload_missing", "payload must be provided as base64.");
}
var entry = _registry.GetRequired(request.KeyId);
byte[] payloadBytes;
try
{
payloadBytes = Convert.FromBase64String(request.PayloadBase64.Trim());
}
catch (FormatException)
{
throw new AttestorSigningException("payload_invalid_base64", "payload must be valid base64.");
}
var normalizedPayload = Convert.ToBase64String(payloadBytes);
var preAuth = DssePreAuthenticationEncoding.Compute(request.PayloadType, payloadBytes);
var signerResolution = _registry.Registry.ResolveSigner(
CryptoCapability.Signing,
entry.Algorithm,
new CryptoKeyReference(entry.ProviderKeyId, entry.ProviderName),
entry.ProviderName);
var signatureBytes = await signerResolution.Signer.SignAsync(preAuth, cancellationToken).ConfigureAwait(false);
var signatureBase64 = Convert.ToBase64String(signatureBytes);
var bundle = BuildBundle(request, entry, normalizedPayload, signatureBase64);
var meta = BuildMeta(request);
var canonicalRequest = new AttestorSubmissionRequest
{
Bundle = bundle,
Meta = meta
};
var canonical = await _canonicalizer.CanonicalizeAsync(canonicalRequest, cancellationToken).ConfigureAwait(false);
meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;
RecordSuccessMetrics(entry, elapsedSeconds);
await WriteAuditAsync(context, entry, meta, elapsedSeconds, result: "signed", error: null, cancellationToken).ConfigureAwait(false);
return new AttestationSignResult
{
Bundle = bundle,
Meta = meta,
KeyId = request.KeyId,
Algorithm = entry.Algorithm,
Mode = bundle.Mode,
Provider = entry.ProviderName,
SignedAt = _timeProvider.GetUtcNow()
};
}
catch (AttestorSigningException)
{
var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;
RecordFailureMetrics(elapsedSeconds);
await WriteAuditAsync(context, null, null, elapsedSeconds, "failed", error: "validation", cancellationToken).ConfigureAwait(false);
throw;
}
catch (Exception ex)
{
var elapsedSeconds = stopwatch.Elapsed.TotalSeconds;
RecordFailureMetrics(elapsedSeconds);
_metrics.ErrorTotal.Add(1, new KeyValuePair<string, object?>("type", "sign"));
_logger.LogError(ex, "Unexpected error while signing attestation.");
await WriteAuditAsync(context, null, null, elapsedSeconds, "failed", error: "unexpected", cancellationToken).ConfigureAwait(false);
throw new AttestorSigningException("signing_failed", "Signing failed due to an internal error.", ex);
}
}
private static AttestorSubmissionRequest.SubmissionBundle BuildBundle(
AttestationSignRequest request,
AttestorSigningKeyRegistry.SigningKeyEntry entry,
string normalizedPayload,
string signatureBase64)
{
var mode = string.IsNullOrWhiteSpace(request.Mode) ? entry.Mode : request.Mode!;
var certificateChain = new List<string>(entry.CertificateChain.Count + (request.CertificateChain?.Count ?? 0));
certificateChain.AddRange(entry.CertificateChain);
if (request.CertificateChain is not null)
{
foreach (var cert in request.CertificateChain)
{
if (!string.IsNullOrWhiteSpace(cert) &&
!certificateChain.Contains(cert, StringComparer.Ordinal))
{
certificateChain.Add(cert);
}
}
}
var bundle = new AttestorSubmissionRequest.SubmissionBundle
{
Mode = mode,
Dsse = new AttestorSubmissionRequest.DsseEnvelope
{
PayloadType = request.PayloadType,
PayloadBase64 = normalizedPayload,
Signatures =
{
new AttestorSubmissionRequest.DsseSignature
{
KeyId = request.KeyId,
Signature = signatureBase64
}
}
},
CertificateChain = certificateChain
};
return bundle;
}
private static AttestorSubmissionRequest.SubmissionMeta BuildMeta(AttestationSignRequest request)
{
var artifact = request.Artifact ?? new AttestorSubmissionRequest.ArtifactInfo();
return new AttestorSubmissionRequest.SubmissionMeta
{
Artifact = new AttestorSubmissionRequest.ArtifactInfo
{
Sha256 = artifact.Sha256,
Kind = artifact.Kind,
ImageDigest = artifact.ImageDigest,
SubjectUri = artifact.SubjectUri
},
Archive = request.Archive,
LogPreference = string.IsNullOrWhiteSpace(request.LogPreference)
? "primary"
: request.LogPreference.Trim()
};
}
private void RecordSuccessMetrics(AttestorSigningKeyRegistry.SigningKeyEntry entry, double elapsedSeconds)
{
_metrics.SignTotal.Add(1,
new KeyValuePair<string, object?>("result", "success"),
new KeyValuePair<string, object?>("algorithm", entry.Algorithm),
new KeyValuePair<string, object?>("provider", entry.ProviderName));
_metrics.SignLatency.Record(elapsedSeconds,
new KeyValuePair<string, object?>("algorithm", entry.Algorithm),
new KeyValuePair<string, object?>("provider", entry.ProviderName));
}
private void RecordFailureMetrics(double elapsedSeconds)
{
_metrics.SignTotal.Add(1, new KeyValuePair<string, object?>("result", "failure"));
_metrics.SignLatency.Record(elapsedSeconds);
}
private async Task WriteAuditAsync(
SubmissionContext context,
AttestorSigningKeyRegistry.SigningKeyEntry? entry,
AttestorSubmissionRequest.SubmissionMeta? meta,
double elapsedSeconds,
string result,
string? error,
CancellationToken cancellationToken)
{
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
if (entry is not null)
{
metadata["algorithm"] = entry.Algorithm;
metadata["provider"] = entry.ProviderName;
metadata["mode"] = entry.Mode;
metadata["keyId"] = entry.KeyId;
}
if (!string.IsNullOrWhiteSpace(error))
{
metadata["error"] = error!;
}
var record = new AttestorAuditRecord
{
Action = "sign",
Result = result,
ArtifactSha256 = meta?.Artifact?.Sha256 ?? string.Empty,
BundleSha256 = meta?.BundleSha256 ?? string.Empty,
Backend = entry?.ProviderName ?? string.Empty,
LatencyMs = (long)(elapsedSeconds * 1000),
Timestamp = _timeProvider.GetUtcNow(),
Caller = new AttestorAuditRecord.CallerDescriptor
{
Subject = context.CallerSubject,
Audience = context.CallerAudience,
ClientId = context.CallerClientId,
MtlsThumbprint = context.MtlsThumbprint,
Tenant = context.CallerTenant
},
Metadata = metadata
};
await _auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
}
}
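
Illustrative usage (not part of this change): a minimal sketch of requesting a DSSE signature through the service above. signingService, context, statementJson, and cancellationToken are placeholders supplied by the host, and the request members are assumed to be settable (they are only read in this diff).

// Illustrative sketch only; context carries the caller subject/tenant fields that end up
// in the audit record written above.
var request = new AttestationSignRequest
{
    KeyId = "attestor-es256",                      // hypothetical configured key id
    PayloadType = "application/vnd.in-toto+json",
    PayloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson))
};
var result = await signingService.SignAsync(request, context, cancellationToken);
// result.Bundle is the signed DSSE envelope; result.Meta.BundleSha256 carries the canonical
// bundle hash computed above, and Algorithm/Provider/Mode echo the resolved key entry.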

View File

@@ -1,21 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="AWSSDK.S3" Version="3.7.307.6" />
</ItemGroup>
</Project>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\StellaOps.Attestor.Verify\StellaOps.Attestor.Verify.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography.Kms\StellaOps.Cryptography.Kms.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.24" />
<PackageReference Include="AWSSDK.S3" Version="3.7.307.6" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,56 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class CachingAttestorDedupeStore : IAttestorDedupeStore
{
private readonly IAttestorDedupeStore _cache;
private readonly IAttestorDedupeStore _inner;
private readonly ILogger<CachingAttestorDedupeStore> _logger;
public CachingAttestorDedupeStore(
IAttestorDedupeStore cache,
IAttestorDedupeStore inner,
ILogger<CachingAttestorDedupeStore> logger)
{
_cache = cache;
_inner = inner;
_logger = logger;
}
public async Task<string?> TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
try
{
var cached = await _cache.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false);
if (!string.IsNullOrWhiteSpace(cached))
{
return cached;
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Dedupe cache lookup failed for bundle {BundleSha}", bundleSha256);
}
return await _inner.TryGetExistingAsync(bundleSha256, cancellationToken).ConfigureAwait(false);
}
public async Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default)
{
await _inner.SetAsync(bundleSha256, rekorUuid, ttl, cancellationToken).ConfigureAwait(false);
try
{
await _cache.SetAsync(bundleSha256, rekorUuid, ttl, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Failed to update dedupe cache for bundle {BundleSha}", bundleSha256);
}
}
}
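
Illustrative composition (not part of this change): a minimal sketch of the intended decorator wiring, with a Redis-backed store as the fast cache in front of the durable Mongo store. redisDedupeStore, mongoDedupeStore, and loggerFactory are placeholders, and because the type is internal this wiring would live inside the infrastructure assembly's own composition code.

// Illustrative sketch only; the cache/inner/logger names match the constructor parameters above.
IAttestorDedupeStore dedupe = new CachingAttestorDedupeStore(
    cache: redisDedupeStore,   // assumed Redis-backed IAttestorDedupeStore
    inner: mongoDedupeStore,   // MongoAttestorDedupeStore shown later in this diff
    logger: loggerFactory.CreateLogger<CachingAttestorDedupeStore>());
var existingUuid = await dedupe.TryGetExistingAsync(bundleSha256);
if (existingUuid is null)
{
    // Writes go to the durable store first, then best-effort to the cache (see SetAsync above).
    await dedupe.SetAsync(bundleSha256, rekorUuid, TimeSpan.FromHours(24));
}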

View File

@@ -9,20 +9,36 @@ using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorAuditSink : IAttestorAuditSink
{
private readonly IMongoCollection<AttestorAuditDocument> _collection;
public MongoAttestorAuditSink(IMongoCollection<AttestorAuditDocument> collection)
{
_collection = collection;
}
public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default)
{
var document = AttestorAuditDocument.FromRecord(record);
return _collection.InsertOneAsync(document, cancellationToken: cancellationToken);
}
internal sealed class MongoAttestorAuditSink : IAttestorAuditSink
{
private readonly IMongoCollection<AttestorAuditDocument> _collection;
private static int _indexesInitialized;
public MongoAttestorAuditSink(IMongoCollection<AttestorAuditDocument> collection)
{
_collection = collection;
EnsureIndexes();
}
public Task WriteAsync(AttestorAuditRecord record, CancellationToken cancellationToken = default)
{
var document = AttestorAuditDocument.FromRecord(record);
return _collection.InsertOneAsync(document, cancellationToken: cancellationToken);
}
private void EnsureIndexes()
{
if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1)
{
return;
}
var index = new CreateIndexModel<AttestorAuditDocument>(
Builders<AttestorAuditDocument>.IndexKeys.Descending(x => x.Timestamp),
new CreateIndexOptions { Name = "ts_desc" });
_collection.Indexes.CreateOne(index);
}
internal sealed class AttestorAuditDocument
{

View File

@@ -0,0 +1,111 @@
using System;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorDedupeStore : IAttestorDedupeStore
{
private readonly IMongoCollection<AttestorDedupeDocument> _collection;
private readonly TimeProvider _timeProvider;
private static int _indexesInitialized;
public MongoAttestorDedupeStore(
IMongoCollection<AttestorDedupeDocument> collection,
TimeProvider timeProvider)
{
_collection = collection;
_timeProvider = timeProvider;
EnsureIndexes();
}
public async Task<string?> TryGetExistingAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
var key = BuildKey(bundleSha256);
var now = _timeProvider.GetUtcNow().UtcDateTime;
var filter = Builders<AttestorDedupeDocument>.Filter.Eq(x => x.Key, key);
var document = await _collection
.Find(filter)
.FirstOrDefaultAsync(cancellationToken)
.ConfigureAwait(false);
if (document is null)
{
return null;
}
if (document.TtlAt <= now)
{
await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false);
return null;
}
return document.RekorUuid;
}
public Task SetAsync(string bundleSha256, string rekorUuid, TimeSpan ttl, CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow().UtcDateTime;
var expiresAt = now.Add(ttl);
var key = BuildKey(bundleSha256);
var filter = Builders<AttestorDedupeDocument>.Filter.Eq(x => x.Key, key);
var update = Builders<AttestorDedupeDocument>.Update
.SetOnInsert(x => x.Key, key)
.Set(x => x.RekorUuid, rekorUuid)
.Set(x => x.CreatedAt, now)
.Set(x => x.TtlAt, expiresAt);
return _collection.UpdateOneAsync(
filter,
update,
new UpdateOptions { IsUpsert = true },
cancellationToken);
}
private static string BuildKey(string bundleSha256) => $"bundle:{bundleSha256}";
private void EnsureIndexes()
{
if (Interlocked.Exchange(ref _indexesInitialized, 1) == 1)
{
return;
}
var indexes = new[]
{
new CreateIndexModel<AttestorDedupeDocument>(
Builders<AttestorDedupeDocument>.IndexKeys.Ascending(x => x.Key),
new CreateIndexOptions { Unique = true, Name = "dedupe_key_unique" }),
new CreateIndexModel<AttestorDedupeDocument>(
Builders<AttestorDedupeDocument>.IndexKeys.Ascending(x => x.TtlAt),
new CreateIndexOptions { ExpireAfter = TimeSpan.Zero, Name = "dedupe_ttl" })
};
_collection.Indexes.CreateMany(indexes);
}
[BsonIgnoreExtraElements]
internal sealed class AttestorDedupeDocument
{
[BsonId]
public ObjectId Id { get; set; }
[BsonElement("key")]
public string Key { get; set; } = string.Empty;
[BsonElement("rekorUuid")]
public string RekorUuid { get; set; } = string.Empty;
[BsonElement("createdAt")]
public DateTime CreatedAt { get; set; }
[BsonElement("ttlAt")]
public DateTime TtlAt { get; set; }
}
}

View File

@@ -1,342 +1,609 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
{
private readonly IMongoCollection<AttestorEntryDocument> _entries;
public MongoAttestorEntryRepository(IMongoCollection<AttestorEntryDocument> entries)
{
_entries = entries;
}
public async Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.BundleSha256, bundleSha256);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, rekorUuid);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Artifact.Sha256, artifactSha256);
var documents = await _entries.Find(filter).ToListAsync(cancellationToken).ConfigureAwait(false);
return documents.ConvertAll(static doc => doc.ToDomain());
}
public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default)
{
var document = AttestorEntryDocument.FromDomain(entry);
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, document.Id);
await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
[BsonIgnoreExtraElements]
internal sealed class AttestorEntryDocument
{
[BsonId]
public string Id { get; set; } = string.Empty;
[BsonElement("artifact")]
public ArtifactDocument Artifact { get; set; } = new();
[BsonElement("bundleSha256")]
public string BundleSha256 { get; set; } = string.Empty;
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("log")]
public LogDocument Log { get; set; } = new();
[BsonElement("createdAt")]
public BsonDateTime CreatedAt { get; set; } = BsonDateTime.Create(System.DateTimeOffset.UtcNow);
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("signerIdentity")]
public SignerIdentityDocument SignerIdentity { get; set; } = new();
[BsonElement("mirror")]
public MirrorDocument? Mirror { get; set; }
public static AttestorEntryDocument FromDomain(AttestorEntry entry)
{
return new AttestorEntryDocument
{
Id = entry.RekorUuid,
Artifact = new ArtifactDocument
{
Sha256 = entry.Artifact.Sha256,
Kind = entry.Artifact.Kind,
ImageDigest = entry.Artifact.ImageDigest,
SubjectUri = entry.Artifact.SubjectUri
},
BundleSha256 = entry.BundleSha256,
Index = entry.Index,
Proof = entry.Proof is null ? null : new ProofDocument
{
Checkpoint = entry.Proof.Checkpoint is null ? null : new CheckpointDocument
{
Origin = entry.Proof.Checkpoint.Origin,
Size = entry.Proof.Checkpoint.Size,
RootHash = entry.Proof.Checkpoint.RootHash,
Timestamp = entry.Proof.Checkpoint.Timestamp is null
? null
: BsonDateTime.Create(entry.Proof.Checkpoint.Timestamp.Value)
},
Inclusion = entry.Proof.Inclusion is null ? null : new InclusionDocument
{
LeafHash = entry.Proof.Inclusion.LeafHash,
Path = entry.Proof.Inclusion.Path
}
},
Log = new LogDocument
{
Backend = entry.Log.Backend,
Url = entry.Log.Url,
LogId = entry.Log.LogId
},
CreatedAt = BsonDateTime.Create(entry.CreatedAt.UtcDateTime),
Status = entry.Status,
SignerIdentity = new SignerIdentityDocument
{
Mode = entry.SignerIdentity.Mode,
Issuer = entry.SignerIdentity.Issuer,
SubjectAlternativeName = entry.SignerIdentity.SubjectAlternativeName,
KeyId = entry.SignerIdentity.KeyId
},
Mirror = entry.Mirror is null ? null : MirrorDocument.FromDomain(entry.Mirror)
};
}
public AttestorEntry ToDomain()
{
return new AttestorEntry
{
RekorUuid = Id,
Artifact = new AttestorEntry.ArtifactDescriptor
{
Sha256 = Artifact.Sha256,
Kind = Artifact.Kind,
ImageDigest = Artifact.ImageDigest,
SubjectUri = Artifact.SubjectUri
},
BundleSha256 = BundleSha256,
Index = Index,
Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor
{
Checkpoint = Proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor
{
Origin = Proof.Checkpoint.Origin,
Size = Proof.Checkpoint.Size,
RootHash = Proof.Checkpoint.RootHash,
Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime()
},
Inclusion = Proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor
{
LeafHash = Proof.Inclusion.LeafHash,
Path = Proof.Inclusion.Path
}
},
Log = new AttestorEntry.LogDescriptor
{
Backend = Log.Backend,
Url = Log.Url,
LogId = Log.LogId
},
CreatedAt = CreatedAt.ToUniversalTime(),
Status = Status,
SignerIdentity = new AttestorEntry.SignerIdentityDescriptor
{
Mode = SignerIdentity.Mode,
Issuer = SignerIdentity.Issuer,
SubjectAlternativeName = SignerIdentity.SubjectAlternativeName,
KeyId = SignerIdentity.KeyId
},
Mirror = Mirror?.ToDomain()
};
}
internal sealed class ArtifactDocument
{
[BsonElement("sha256")]
public string Sha256 { get; set; } = string.Empty;
[BsonElement("kind")]
public string Kind { get; set; } = string.Empty;
[BsonElement("imageDigest")]
public string? ImageDigest { get; set; }
[BsonElement("subjectUri")]
public string? SubjectUri { get; set; }
}
internal sealed class ProofDocument
{
[BsonElement("checkpoint")]
public CheckpointDocument? Checkpoint { get; set; }
[BsonElement("inclusion")]
public InclusionDocument? Inclusion { get; set; }
}
internal sealed class CheckpointDocument
{
[BsonElement("origin")]
public string? Origin { get; set; }
[BsonElement("size")]
public long Size { get; set; }
[BsonElement("rootHash")]
public string? RootHash { get; set; }
[BsonElement("timestamp")]
public BsonDateTime? Timestamp { get; set; }
}
internal sealed class InclusionDocument
{
[BsonElement("leafHash")]
public string? LeafHash { get; set; }
[BsonElement("path")]
public IReadOnlyList<string> Path { get; set; } = System.Array.Empty<string>();
}
internal sealed class LogDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = "primary";
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("logId")]
public string? LogId { get; set; }
}
internal sealed class SignerIdentityDocument
{
[BsonElement("mode")]
public string Mode { get; set; } = string.Empty;
[BsonElement("issuer")]
public string? Issuer { get; set; }
[BsonElement("san")]
public string? SubjectAlternativeName { get; set; }
[BsonElement("kid")]
public string? KeyId { get; set; }
}
internal sealed class MirrorDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = string.Empty;
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("uuid")]
public string? Uuid { get; set; }
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("logId")]
public string? LogId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
public static MirrorDocument FromDomain(AttestorEntry.LogReplicaDescriptor mirror)
{
return new MirrorDocument
{
Backend = mirror.Backend,
Url = mirror.Url,
Uuid = mirror.Uuid,
Index = mirror.Index,
Status = mirror.Status,
Proof = mirror.Proof is null ? null : new ProofDocument
{
Checkpoint = mirror.Proof.Checkpoint is null ? null : new CheckpointDocument
{
Origin = mirror.Proof.Checkpoint.Origin,
Size = mirror.Proof.Checkpoint.Size,
RootHash = mirror.Proof.Checkpoint.RootHash,
Timestamp = mirror.Proof.Checkpoint.Timestamp is null
? null
: BsonDateTime.Create(mirror.Proof.Checkpoint.Timestamp.Value)
},
Inclusion = mirror.Proof.Inclusion is null ? null : new InclusionDocument
{
LeafHash = mirror.Proof.Inclusion.LeafHash,
Path = mirror.Proof.Inclusion.Path
}
},
LogId = mirror.LogId,
Error = mirror.Error
};
}
public AttestorEntry.LogReplicaDescriptor ToDomain()
{
return new AttestorEntry.LogReplicaDescriptor
{
Backend = Backend,
Url = Url,
Uuid = Uuid,
Index = Index,
Status = Status,
Proof = Proof is null ? null : new AttestorEntry.ProofDescriptor
{
Checkpoint = Proof.Checkpoint is null ? null : new AttestorEntry.CheckpointDescriptor
{
Origin = Proof.Checkpoint.Origin,
Size = Proof.Checkpoint.Size,
RootHash = Proof.Checkpoint.RootHash,
Timestamp = Proof.Checkpoint.Timestamp?.ToUniversalTime()
},
Inclusion = Proof.Inclusion is null ? null : new AttestorEntry.InclusionDescriptor
{
LeafHash = Proof.Inclusion.LeafHash,
Path = Proof.Inclusion.Path
}
},
LogId = LogId,
Error = Error
};
}
}
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson.Serialization.Attributes;
using MongoDB.Driver;
using StellaOps.Attestor.Core.Storage;
namespace StellaOps.Attestor.Infrastructure.Storage;
internal sealed class MongoAttestorEntryRepository : IAttestorEntryRepository
{
private const int DefaultPageSize = 50;
private const int MaxPageSize = 200;
private readonly IMongoCollection<AttestorEntryDocument> _entries;
public MongoAttestorEntryRepository(IMongoCollection<AttestorEntryDocument> entries)
{
_entries = entries ?? throw new ArgumentNullException(nameof(entries));
EnsureIndexes();
}
public async Task<AttestorEntry?> GetByBundleShaAsync(string bundleSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.BundleSha256, bundleSha256);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<AttestorEntry?> GetByUuidAsync(string rekorUuid, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, rekorUuid);
var document = await _entries.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false);
return document?.ToDomain();
}
public async Task<IReadOnlyList<AttestorEntry>> GetByArtifactShaAsync(string artifactSha256, CancellationToken cancellationToken = default)
{
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Artifact.Sha256, artifactSha256);
var documents = await _entries.Find(filter)
.Sort(Builders<AttestorEntryDocument>.Sort.Descending(x => x.CreatedAt))
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
return documents.ConvertAll(static doc => doc.ToDomain());
}
public async Task SaveAsync(AttestorEntry entry, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(entry);
var document = AttestorEntryDocument.FromDomain(entry);
var filter = Builders<AttestorEntryDocument>.Filter.Eq(x => x.Id, document.Id);
await _entries.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false);
}
public async Task<AttestorEntryQueryResult> QueryAsync(AttestorEntryQuery query, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(query);
var pageSize = query.PageSize <= 0 ? DefaultPageSize : Math.Min(query.PageSize, MaxPageSize);
var filterBuilder = Builders<AttestorEntryDocument>.Filter;
var filter = filterBuilder.Empty;
if (!string.IsNullOrWhiteSpace(query.Subject))
{
var subject = query.Subject;
var subjectFilter = filterBuilder.Or(
filterBuilder.Eq(x => x.Artifact.Sha256, subject),
filterBuilder.Eq(x => x.Artifact.ImageDigest, subject),
filterBuilder.Eq(x => x.Artifact.SubjectUri, subject));
filter &= subjectFilter;
}
if (!string.IsNullOrWhiteSpace(query.Type))
{
filter &= filterBuilder.Eq(x => x.Artifact.Kind, query.Type);
}
if (!string.IsNullOrWhiteSpace(query.Issuer))
{
filter &= filterBuilder.Eq(x => x.SignerIdentity.SubjectAlternativeName, query.Issuer);
}
if (!string.IsNullOrWhiteSpace(query.Scope))
{
filter &= filterBuilder.Eq(x => x.SignerIdentity.Issuer, query.Scope);
}
if (query.CreatedAfter is { } createdAfter)
{
filter &= filterBuilder.Gte(x => x.CreatedAt, createdAfter.UtcDateTime);
}
if (query.CreatedBefore is { } createdBefore)
{
filter &= filterBuilder.Lte(x => x.CreatedAt, createdBefore.UtcDateTime);
}
if (!string.IsNullOrWhiteSpace(query.ContinuationToken))
{
if (!AttestorEntryContinuationToken.TryParse(query.ContinuationToken, out var cursor))
{
throw new FormatException("Invalid continuation token.");
}
var cursorInstant = cursor.CreatedAt.UtcDateTime;
var continuationFilter = filterBuilder.Or(
filterBuilder.Lt(x => x.CreatedAt, cursorInstant),
filterBuilder.And(
filterBuilder.Eq(x => x.CreatedAt, cursorInstant),
filterBuilder.Gt(x => x.Id, cursor.RekorUuid)));
filter &= continuationFilter;
}
var sort = Builders<AttestorEntryDocument>.Sort
.Descending(x => x.CreatedAt)
.Ascending(x => x.Id);
var documents = await _entries.Find(filter)
.Sort(sort)
.Limit(pageSize + 1)
.ToListAsync(cancellationToken)
.ConfigureAwait(false);
string? continuation = null;
if (documents.Count > pageSize)
{
var cursorDocument = documents[pageSize];
var nextCreatedAt = DateTime.SpecifyKind(cursorDocument.CreatedAt, DateTimeKind.Utc);
continuation = AttestorEntryContinuationToken.Encode(new DateTimeOffset(nextCreatedAt), cursorDocument.Id);
documents.RemoveRange(pageSize, documents.Count - pageSize);
}
var items = documents.ConvertAll(static doc => doc.ToDomain());
return new AttestorEntryQueryResult
{
Items = items,
ContinuationToken = continuation
};
}
private void EnsureIndexes()
{
var keys = Builders<AttestorEntryDocument>.IndexKeys;
var models = new[]
{
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.BundleSha256),
new CreateIndexOptions { Name = "bundle_sha_unique", Unique = true }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Descending(x => x.CreatedAt).Ascending(x => x.Id),
new CreateIndexOptions { Name = "created_at_uuid" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.Sha256),
new CreateIndexOptions { Name = "artifact_sha" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.ImageDigest),
new CreateIndexOptions { Name = "artifact_image_digest" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.Artifact.SubjectUri),
new CreateIndexOptions { Name = "artifact_subject_uri" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.SignerIdentity.Issuer)
.Ascending(x => x.Artifact.Kind)
.Descending(x => x.CreatedAt)
.Ascending(x => x.Id),
new CreateIndexOptions { Name = "scope_kind_created_at" }),
new CreateIndexModel<AttestorEntryDocument>(
keys.Ascending(x => x.SignerIdentity.SubjectAlternativeName),
new CreateIndexOptions { Name = "issuer_san" })
};
_entries.Indexes.CreateMany(models);
}
[BsonIgnoreExtraElements]
internal sealed class AttestorEntryDocument
{
[BsonId]
public string Id { get; set; } = string.Empty;
[BsonElement("artifact")]
public ArtifactDocument Artifact { get; set; } = new();
[BsonElement("bundleSha256")]
public string BundleSha256 { get; set; } = string.Empty;
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("witness")]
public WitnessDocument? Witness { get; set; }
[BsonElement("log")]
public LogDocument Log { get; set; } = new();
[BsonElement("createdAt")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime CreatedAt { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("signer")]
public SignerIdentityDocument SignerIdentity { get; set; } = new();
[BsonElement("mirror")]
public MirrorDocument? Mirror { get; set; }
public static AttestorEntryDocument FromDomain(AttestorEntry entry)
{
ArgumentNullException.ThrowIfNull(entry);
return new AttestorEntryDocument
{
Id = entry.RekorUuid,
Artifact = ArtifactDocument.FromDomain(entry.Artifact),
BundleSha256 = entry.BundleSha256,
Index = entry.Index,
Proof = ProofDocument.FromDomain(entry.Proof),
Witness = WitnessDocument.FromDomain(entry.Witness),
Log = LogDocument.FromDomain(entry.Log),
CreatedAt = entry.CreatedAt.UtcDateTime,
Status = entry.Status,
SignerIdentity = SignerIdentityDocument.FromDomain(entry.SignerIdentity),
Mirror = MirrorDocument.FromDomain(entry.Mirror)
};
}
public AttestorEntry ToDomain()
{
var createdAtUtc = DateTime.SpecifyKind(CreatedAt, DateTimeKind.Utc);
return new AttestorEntry
{
RekorUuid = Id,
Artifact = Artifact.ToDomain(),
BundleSha256 = BundleSha256,
Index = Index,
Proof = Proof?.ToDomain(),
Witness = Witness?.ToDomain(),
Log = Log.ToDomain(),
CreatedAt = new DateTimeOffset(createdAtUtc),
Status = Status,
SignerIdentity = SignerIdentity.ToDomain(),
Mirror = Mirror?.ToDomain()
};
}
}
internal sealed class ArtifactDocument
{
[BsonElement("sha256")]
public string Sha256 { get; set; } = string.Empty;
[BsonElement("kind")]
public string Kind { get; set; } = string.Empty;
[BsonElement("imageDigest")]
public string? ImageDigest { get; set; }
[BsonElement("subjectUri")]
public string? SubjectUri { get; set; }
public static ArtifactDocument FromDomain(AttestorEntry.ArtifactDescriptor artifact)
{
ArgumentNullException.ThrowIfNull(artifact);
return new ArtifactDocument
{
Sha256 = artifact.Sha256,
Kind = artifact.Kind,
ImageDigest = artifact.ImageDigest,
SubjectUri = artifact.SubjectUri
};
}
public AttestorEntry.ArtifactDescriptor ToDomain()
{
return new AttestorEntry.ArtifactDescriptor
{
Sha256 = Sha256,
Kind = Kind,
ImageDigest = ImageDigest,
SubjectUri = SubjectUri
};
}
}
internal sealed class ProofDocument
{
[BsonElement("checkpoint")]
public CheckpointDocument? Checkpoint { get; set; }
[BsonElement("inclusion")]
public InclusionDocument? Inclusion { get; set; }
public static ProofDocument? FromDomain(AttestorEntry.ProofDescriptor? proof)
{
if (proof is null)
{
return null;
}
return new ProofDocument
{
Checkpoint = CheckpointDocument.FromDomain(proof.Checkpoint),
Inclusion = InclusionDocument.FromDomain(proof.Inclusion)
};
}
public AttestorEntry.ProofDescriptor ToDomain()
{
return new AttestorEntry.ProofDescriptor
{
Checkpoint = Checkpoint?.ToDomain(),
Inclusion = Inclusion?.ToDomain()
};
}
}
internal sealed class WitnessDocument
{
[BsonElement("aggregator")]
public string? Aggregator { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "unknown";
[BsonElement("rootHash")]
public string? RootHash { get; set; }
[BsonElement("retrievedAt")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime RetrievedAt { get; set; }
[BsonElement("statement")]
public string? Statement { get; set; }
[BsonElement("signature")]
public string? Signature { get; set; }
[BsonElement("keyId")]
public string? KeyId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
public static WitnessDocument? FromDomain(AttestorEntry.WitnessDescriptor? witness)
{
if (witness is null)
{
return null;
}
return new WitnessDocument
{
Aggregator = witness.Aggregator,
Status = witness.Status,
RootHash = witness.RootHash,
RetrievedAt = witness.RetrievedAt.UtcDateTime,
Statement = witness.Statement,
Signature = witness.Signature,
KeyId = witness.KeyId,
Error = witness.Error
};
}
public AttestorEntry.WitnessDescriptor ToDomain()
{
return new AttestorEntry.WitnessDescriptor
{
Aggregator = Aggregator ?? string.Empty,
Status = string.IsNullOrWhiteSpace(Status) ? "unknown" : Status,
RootHash = RootHash,
RetrievedAt = new DateTimeOffset(DateTime.SpecifyKind(RetrievedAt, DateTimeKind.Utc)),
Statement = Statement,
Signature = Signature,
KeyId = KeyId,
Error = Error
};
}
}
internal sealed class CheckpointDocument
{
[BsonElement("origin")]
public string? Origin { get; set; }
[BsonElement("size")]
public long Size { get; set; }
[BsonElement("rootHash")]
public string? RootHash { get; set; }
[BsonElement("timestamp")]
[BsonDateTimeOptions(Kind = DateTimeKind.Utc)]
public DateTime? Timestamp { get; set; }
public static CheckpointDocument? FromDomain(AttestorEntry.CheckpointDescriptor? checkpoint)
{
if (checkpoint is null)
{
return null;
}
return new CheckpointDocument
{
Origin = checkpoint.Origin,
Size = checkpoint.Size,
RootHash = checkpoint.RootHash,
Timestamp = checkpoint.Timestamp?.UtcDateTime
};
}
public AttestorEntry.CheckpointDescriptor ToDomain()
{
return new AttestorEntry.CheckpointDescriptor
{
Origin = Origin,
Size = Size,
RootHash = RootHash,
Timestamp = Timestamp is null ? null : new DateTimeOffset(DateTime.SpecifyKind(Timestamp.Value, DateTimeKind.Utc))
};
}
}
internal sealed class InclusionDocument
{
[BsonElement("leafHash")]
public string? LeafHash { get; set; }
[BsonElement("path")]
public IReadOnlyList<string> Path { get; set; } = Array.Empty<string>();
public static InclusionDocument? FromDomain(AttestorEntry.InclusionDescriptor? inclusion)
{
if (inclusion is null)
{
return null;
}
return new InclusionDocument
{
LeafHash = inclusion.LeafHash,
Path = inclusion.Path
};
}
public AttestorEntry.InclusionDescriptor ToDomain()
{
return new AttestorEntry.InclusionDescriptor
{
LeafHash = LeafHash,
Path = Path
};
}
}
internal sealed class LogDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = "primary";
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("logId")]
public string? LogId { get; set; }
public static LogDocument FromDomain(AttestorEntry.LogDescriptor log)
{
ArgumentNullException.ThrowIfNull(log);
return new LogDocument
{
Backend = log.Backend,
Url = log.Url,
LogId = log.LogId
};
}
public AttestorEntry.LogDescriptor ToDomain()
{
return new AttestorEntry.LogDescriptor
{
Backend = Backend,
Url = Url,
LogId = LogId
};
}
}
internal sealed class SignerIdentityDocument
{
[BsonElement("mode")]
public string Mode { get; set; } = string.Empty;
[BsonElement("issuer")]
public string? Issuer { get; set; }
[BsonElement("san")]
public string? SubjectAlternativeName { get; set; }
[BsonElement("kid")]
public string? KeyId { get; set; }
public static SignerIdentityDocument FromDomain(AttestorEntry.SignerIdentityDescriptor signer)
{
ArgumentNullException.ThrowIfNull(signer);
return new SignerIdentityDocument
{
Mode = signer.Mode,
Issuer = signer.Issuer,
SubjectAlternativeName = signer.SubjectAlternativeName,
KeyId = signer.KeyId
};
}
public AttestorEntry.SignerIdentityDescriptor ToDomain()
{
return new AttestorEntry.SignerIdentityDescriptor
{
Mode = Mode,
Issuer = Issuer,
SubjectAlternativeName = SubjectAlternativeName,
KeyId = KeyId
};
}
}
internal sealed class MirrorDocument
{
[BsonElement("backend")]
public string Backend { get; set; } = string.Empty;
[BsonElement("url")]
public string Url { get; set; } = string.Empty;
[BsonElement("uuid")]
public string? Uuid { get; set; }
[BsonElement("index")]
public long? Index { get; set; }
[BsonElement("status")]
public string Status { get; set; } = "pending";
[BsonElement("proof")]
public ProofDocument? Proof { get; set; }
[BsonElement("witness")]
public WitnessDocument? Witness { get; set; }
[BsonElement("logId")]
public string? LogId { get; set; }
[BsonElement("error")]
public string? Error { get; set; }
public static MirrorDocument? FromDomain(AttestorEntry.LogReplicaDescriptor? mirror)
{
if (mirror is null)
{
return null;
}
return new MirrorDocument
{
Backend = mirror.Backend,
Url = mirror.Url,
Uuid = mirror.Uuid,
Index = mirror.Index,
Status = mirror.Status,
Proof = ProofDocument.FromDomain(mirror.Proof),
Witness = WitnessDocument.FromDomain(mirror.Witness),
LogId = mirror.LogId,
Error = mirror.Error
};
}
public AttestorEntry.LogReplicaDescriptor ToDomain()
{
return new AttestorEntry.LogReplicaDescriptor
{
Backend = Backend,
Url = Url,
Uuid = Uuid,
Index = Index,
Status = Status,
Proof = Proof?.ToDomain(),
Witness = Witness?.ToDomain(),
LogId = LogId,
Error = Error
};
}
}
}
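
Illustrative usage (not part of this change): a minimal sketch of paging through QueryAsync with the continuation token, mirroring how the export path earlier in this diff builds AttestorEntryQuery. repository, artifactSha256, and cancellationToken are placeholders.

// Illustrative sketch only; repository stands for the IAttestorEntryRepository implementation above.
string? continuation = null;
do
{
    var page = await repository.QueryAsync(new AttestorEntryQuery
    {
        Subject = artifactSha256,        // matches artifact sha256, image digest, or subject URI
        PageSize = 100,                  // values above 200 are clamped to MaxPageSize by the repository
        ContinuationToken = continuation
    }, cancellationToken);
    foreach (var entry in page.Items)
    {
        Console.WriteLine($"{entry.RekorUuid} {entry.CreatedAt:O} {entry.Status}");
    }
    continuation = page.ContinuationToken; // null once the last page has been returned
}
while (continuation is not null);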

View File

@@ -14,9 +14,15 @@ internal sealed class NullAttestorArchiveStore : IAttestorArchiveStore
_logger = logger;
}
public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default)
{
_logger.LogDebug("Archive disabled; skipping bundle {BundleSha}", bundle.BundleSha256);
return Task.CompletedTask;
}
}
public Task ArchiveBundleAsync(AttestorArchiveBundle bundle, CancellationToken cancellationToken = default)
{
_logger.LogDebug("Archive disabled; skipping bundle {BundleSha}", bundle.BundleSha256);
return Task.CompletedTask;
}
public Task<AttestorArchiveBundle?> GetBundleAsync(string bundleSha256, string rekorUuid, CancellationToken cancellationToken = default)
{
_logger.LogDebug("Archive disabled; bundle {BundleSha} ({RekorUuid}) cannot be retrieved", bundleSha256, rekorUuid);
return Task.FromResult<AttestorArchiveBundle?>(null);
}
}

Some files were not shown because too many files have changed in this diff.