Test fixes and initial work on new product advisories

This commit is contained in:
master
2026-01-28 02:30:48 +02:00
parent 82caceba56
commit 644887997c
288 changed files with 69101 additions and 375 deletions

View File

@@ -0,0 +1,338 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under the BUSL-1.1 license.
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.EvidencePack.Models;
/// <summary>
/// Manifest for a Release Evidence Pack containing all metadata for verification.
/// </summary>
/// <remarks>
/// NOTE(review): property declaration order is significant — the source-generated
/// serializer emits properties in declaration order, and Build() hashes the
/// serialized JSON into <see cref="ManifestHash"/>. Reordering members changes
/// the manifest hash of otherwise-identical packs.
/// </remarks>
public sealed record ReleaseEvidencePackManifest
{
/// <summary>
/// Bundle format version (semver).
/// </summary>
[JsonPropertyName("bundleFormatVersion")]
public required string BundleFormatVersion { get; init; }
/// <summary>
/// Release version being attested.
/// </summary>
[JsonPropertyName("releaseVersion")]
public required string ReleaseVersion { get; init; }
/// <summary>
/// Timestamp when the bundle was created (ISO 8601).
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Git commit SHA for the release source.
/// </summary>
[JsonPropertyName("sourceCommit")]
public required string SourceCommit { get; init; }
/// <summary>
/// SOURCE_DATE_EPOCH used for reproducible builds (Unix timestamp).
/// </summary>
[JsonPropertyName("sourceDateEpoch")]
public required long SourceDateEpoch { get; init; }
/// <summary>
/// Release artifacts included in the pack.
/// </summary>
[JsonPropertyName("artifacts")]
public required ImmutableArray<ArtifactEntry> Artifacts { get; init; }
/// <summary>
/// Checksum entries for all files in the pack, keyed by bundle-relative path.
/// </summary>
[JsonPropertyName("checksums")]
public required ImmutableDictionary<string, ChecksumEntry> Checksums { get; init; }
/// <summary>
/// SBOM references included in the pack.
/// </summary>
[JsonPropertyName("sboms")]
public required ImmutableArray<SbomReference> Sboms { get; init; }
/// <summary>
/// Provenance statements (SLSA) included in the pack.
/// </summary>
[JsonPropertyName("provenanceStatements")]
public required ImmutableArray<ProvenanceReference> ProvenanceStatements { get; init; }
/// <summary>
/// Attestation references (DSSE envelopes) included in the pack.
/// </summary>
[JsonPropertyName("attestations")]
public required ImmutableArray<AttestationReference> Attestations { get; init; }
/// <summary>
/// Rekor transparency log proofs for offline verification.
/// </summary>
[JsonPropertyName("rekorProofs")]
public required ImmutableArray<RekorProofEntry> RekorProofs { get; init; }
/// <summary>
/// Fingerprint of the signing public key.
/// </summary>
[JsonPropertyName("signingKeyFingerprint")]
public required string SigningKeyFingerprint { get; init; }
/// <summary>
/// Rekor transparency log ID. Null when the pack carries no Rekor metadata.
/// </summary>
[JsonPropertyName("rekorLogId")]
public string? RekorLogId { get; init; }
/// <summary>
/// SHA-256 hash of the manifest itself (computed after serialization, excluding this field).
/// Populated by the builder via a <c>with</c>-expression after hashing the
/// serialized manifest; null while the manifest is being constructed.
/// </summary>
[JsonPropertyName("manifestHash")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ManifestHash { get; init; }
}
/// <summary>
/// Entry for a release artifact.
/// </summary>
/// <remarks>
/// Hash digests produced by the builder are lowercase hex; entries populated
/// from other sources should match — TODO(review): confirm external producers
/// normalize casing.
/// </remarks>
public sealed record ArtifactEntry
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// Human-readable name of the artifact.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Platform/architecture (e.g., "linux-x64", "macos-universal").
/// </summary>
[JsonPropertyName("platform")]
public required string Platform { get; init; }
/// <summary>
/// SHA-256 hash of the artifact.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
/// <summary>
/// SHA-512 hash of the artifact (optional).
/// </summary>
[JsonPropertyName("sha512")]
public string? Sha512 { get; init; }
/// <summary>
/// File size in bytes.
/// </summary>
[JsonPropertyName("size")]
public required long Size { get; init; }
/// <summary>
/// Path to the signature file. Null when the artifact is unsigned.
/// </summary>
[JsonPropertyName("signaturePath")]
public string? SignaturePath { get; init; }
/// <summary>
/// Path to the certificate file (for keyless signing).
/// </summary>
[JsonPropertyName("certificatePath")]
public string? CertificatePath { get; init; }
}
/// <summary>
/// Checksum entry for a file.
/// </summary>
/// <remarks>
/// NOTE(review): <see cref="Size"/> may be 0 for entries added from references
/// that carry no size (the builder's AddSbom/AddProvenance/AddAttestation pass
/// 0) — treat 0 as "unknown", not "empty file".
/// </remarks>
public sealed record ChecksumEntry
{
/// <summary>
/// SHA-256 hash.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
/// <summary>
/// SHA-512 hash (optional).
/// </summary>
[JsonPropertyName("sha512")]
public string? Sha512 { get; init; }
/// <summary>
/// File size in bytes.
/// </summary>
[JsonPropertyName("size")]
public required long Size { get; init; }
}
/// <summary>
/// Reference to an SBOM file.
/// </summary>
public sealed record SbomReference
{
/// <summary>
/// Relative path to the SBOM file.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// SBOM format (e.g., "cyclonedx-json", "spdx-json").
/// </summary>
[JsonPropertyName("format")]
public required string Format { get; init; }
/// <summary>
/// SBOM spec version (e.g., "1.5", "2.3").
/// </summary>
[JsonPropertyName("specVersion")]
public required string SpecVersion { get; init; }
/// <summary>
/// Artifact this SBOM describes. Presumably matches an ArtifactEntry name or
/// path — TODO(review): confirm the linkage key used by consumers.
/// </summary>
[JsonPropertyName("forArtifact")]
public required string ForArtifact { get; init; }
/// <summary>
/// Path to the signature file. Null when the SBOM is unsigned.
/// </summary>
[JsonPropertyName("signaturePath")]
public string? SignaturePath { get; init; }
/// <summary>
/// SHA-256 hash of the SBOM.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
}
/// <summary>
/// Reference to a provenance statement (SLSA).
/// </summary>
public sealed record ProvenanceReference
{
/// <summary>
/// Relative path to the provenance file.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// Predicate type URI (e.g., "https://slsa.dev/provenance/v1").
/// </summary>
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
/// <summary>
/// Artifact this provenance describes.
/// </summary>
[JsonPropertyName("forArtifact")]
public required string ForArtifact { get; init; }
/// <summary>
/// Path to the signature file. Null when the statement is unsigned.
/// </summary>
[JsonPropertyName("signaturePath")]
public string? SignaturePath { get; init; }
/// <summary>
/// Builder ID from the provenance.
/// </summary>
[JsonPropertyName("builderId")]
public string? BuilderId { get; init; }
/// <summary>
/// SLSA level claimed. Null when the statement does not declare one.
/// </summary>
[JsonPropertyName("slsaLevel")]
public int? SlsaLevel { get; init; }
/// <summary>
/// SHA-256 hash of the provenance file.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
}
/// <summary>
/// Reference to a DSSE attestation.
/// </summary>
public sealed record AttestationReference
{
/// <summary>
/// Relative path to the attestation file.
/// </summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>
/// Attestation type/predicate.
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Description of what this attestation covers. Optional.
/// </summary>
[JsonPropertyName("description")]
public string? Description { get; init; }
/// <summary>
/// SHA-256 hash of the attestation.
/// </summary>
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
}
/// <summary>
/// Rekor transparency log proof entry for offline verification.
/// </summary>
public sealed record RekorProofEntry
{
/// <summary>
/// Rekor log entry UUID.
/// </summary>
[JsonPropertyName("uuid")]
public required string Uuid { get; init; }
/// <summary>
/// Log index.
/// </summary>
[JsonPropertyName("logIndex")]
public required long LogIndex { get; init; }
/// <summary>
/// Integrated time (Unix timestamp).
/// </summary>
[JsonPropertyName("integratedTime")]
public required long IntegratedTime { get; init; }
/// <summary>
/// Artifact this proof is for.
/// </summary>
[JsonPropertyName("forArtifact")]
public required string ForArtifact { get; init; }
/// <summary>
/// Relative path to the inclusion proof JSON.
/// </summary>
[JsonPropertyName("inclusionProofPath")]
public required string InclusionProofPath { get; init; }
/// <summary>
/// Body of the log entry (base64). Optional; presumably the canonicalized
/// Rekor entry body — TODO(review): confirm against the proof writer.
/// </summary>
[JsonPropertyName("body")]
public string? Body { get; init; }
}

View File

@@ -0,0 +1,413 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under the BUSL-1.1 license.
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.EvidencePack.Models;
namespace StellaOps.Attestor.EvidencePack;
/// <summary>
/// Builder for constructing Release Evidence Packs.
/// </summary>
/// <remarks>
/// Fluent builder: configure release metadata with the <c>With*</c> methods,
/// add content with the <c>Add*</c> methods, then call <see cref="Build"/> to
/// produce an immutable manifest whose <c>manifestHash</c> covers the
/// serialized form (excluding the hash field itself). Not thread-safe.
/// </remarks>
public sealed class ReleaseEvidencePackBuilder
{
    private readonly ILogger<ReleaseEvidencePackBuilder> _logger;
    private readonly List<ArtifactEntry> _artifacts = [];
    private readonly Dictionary<string, ChecksumEntry> _checksums = [];
    private readonly List<SbomReference> _sboms = [];
    private readonly List<ProvenanceReference> _provenanceStatements = [];
    private readonly List<AttestationReference> _attestations = [];
    private readonly List<RekorProofEntry> _rekorProofs = [];
    private string _releaseVersion = string.Empty;
    private string _sourceCommit = string.Empty;
    private long _sourceDateEpoch;
    private string _signingKeyFingerprint = string.Empty;
    private string? _rekorLogId;
    private DateTimeOffset? _createdAt;

    /// <summary>
    /// Current bundle format version.
    /// </summary>
    public const string BundleFormatVersion = "1.0.0";

    public ReleaseEvidencePackBuilder(ILogger<ReleaseEvidencePackBuilder> logger)
    {
        // Fail fast instead of an NRE on the first log call.
        ArgumentNullException.ThrowIfNull(logger);
        _logger = logger;
    }

    /// <summary>
    /// Sets the release version.
    /// </summary>
    public ReleaseEvidencePackBuilder WithReleaseVersion(string version)
    {
        _releaseVersion = version ?? throw new ArgumentNullException(nameof(version));
        return this;
    }

    /// <summary>
    /// Sets the source commit SHA.
    /// </summary>
    public ReleaseEvidencePackBuilder WithSourceCommit(string commit)
    {
        _sourceCommit = commit ?? throw new ArgumentNullException(nameof(commit));
        return this;
    }

    /// <summary>
    /// Sets the SOURCE_DATE_EPOCH for reproducible builds.
    /// </summary>
    public ReleaseEvidencePackBuilder WithSourceDateEpoch(long epoch)
    {
        _sourceDateEpoch = epoch;
        return this;
    }

    /// <summary>
    /// Sets the signing key fingerprint.
    /// </summary>
    public ReleaseEvidencePackBuilder WithSigningKeyFingerprint(string fingerprint)
    {
        _signingKeyFingerprint = fingerprint ?? throw new ArgumentNullException(nameof(fingerprint));
        return this;
    }

    /// <summary>
    /// Sets the Rekor log ID. Optional; null clears it.
    /// </summary>
    public ReleaseEvidencePackBuilder WithRekorLogId(string logId)
    {
        _rekorLogId = logId;
        return this;
    }

    /// <summary>
    /// Sets the creation timestamp (defaults to UtcNow if not set).
    /// </summary>
    public ReleaseEvidencePackBuilder WithCreatedAt(DateTimeOffset timestamp)
    {
        _createdAt = timestamp;
        return this;
    }

    /// <summary>
    /// Adds an artifact to the pack and records its checksum.
    /// </summary>
    public ReleaseEvidencePackBuilder AddArtifact(ArtifactEntry artifact)
    {
        ArgumentNullException.ThrowIfNull(artifact);
        _artifacts.Add(artifact);
        AddChecksumForFile(artifact.Path, artifact.Sha256, artifact.Sha512, artifact.Size);
        _logger.LogDebug("Added artifact: {Path}", artifact.Path);
        return this;
    }

    /// <summary>
    /// Adds an artifact from a file path, computing its hashes and size.
    /// </summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public ReleaseEvidencePackBuilder AddArtifactFromFile(
        string filePath,
        string relativePath,
        string name,
        string platform,
        string? signaturePath = null,
        string? certificatePath = null)
    {
        ArgumentNullException.ThrowIfNull(filePath);
        ArgumentNullException.ThrowIfNull(relativePath);
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException($"Artifact file not found: {filePath}");
        }
        var (sha256, sha512) = ComputeFileHashes(filePath);
        var artifact = new ArtifactEntry
        {
            Path = relativePath,
            Name = name,
            Platform = platform,
            Sha256 = sha256,
            Sha512 = sha512,
            Size = fileInfo.Length,
            SignaturePath = signaturePath,
            CertificatePath = certificatePath
        };
        return AddArtifact(artifact);
    }

    /// <summary>
    /// Adds an SBOM reference to the pack. The checksum entry is recorded with
    /// size 0 because the reference carries no size information.
    /// </summary>
    public ReleaseEvidencePackBuilder AddSbom(SbomReference sbom)
    {
        ArgumentNullException.ThrowIfNull(sbom);
        _sboms.Add(sbom);
        AddChecksumForFile(sbom.Path, sbom.Sha256, null, 0);
        _logger.LogDebug("Added SBOM: {Path}", sbom.Path);
        return this;
    }

    /// <summary>
    /// Adds an SBOM from a file path, computing its hash.
    /// </summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public ReleaseEvidencePackBuilder AddSbomFromFile(
        string filePath,
        string relativePath,
        string format,
        string specVersion,
        string forArtifact,
        string? signaturePath = null)
    {
        ArgumentNullException.ThrowIfNull(filePath);
        ArgumentNullException.ThrowIfNull(relativePath);
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException($"SBOM file not found: {filePath}");
        }
        var (sha256, _) = ComputeFileHashes(filePath);
        var sbom = new SbomReference
        {
            Path = relativePath,
            Format = format,
            SpecVersion = specVersion,
            ForArtifact = forArtifact,
            SignaturePath = signaturePath,
            Sha256 = sha256
        };
        AddSbom(sbom);
        // BUGFIX: AddSbom records a size of 0 (the reference carries no size);
        // we know the real size here, so overwrite the checksum entry with it.
        AddChecksumForFile(relativePath, sha256, null, fileInfo.Length);
        return this;
    }

    /// <summary>
    /// Adds a provenance statement to the pack. The checksum entry is recorded
    /// with size 0 because the reference carries no size information.
    /// </summary>
    public ReleaseEvidencePackBuilder AddProvenance(ProvenanceReference provenance)
    {
        ArgumentNullException.ThrowIfNull(provenance);
        _provenanceStatements.Add(provenance);
        AddChecksumForFile(provenance.Path, provenance.Sha256, null, 0);
        _logger.LogDebug("Added provenance: {Path}", provenance.Path);
        return this;
    }

    /// <summary>
    /// Adds a provenance statement from a file path, computing its hash.
    /// </summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public ReleaseEvidencePackBuilder AddProvenanceFromFile(
        string filePath,
        string relativePath,
        string predicateType,
        string forArtifact,
        string? signaturePath = null,
        string? builderId = null,
        int? slsaLevel = null)
    {
        ArgumentNullException.ThrowIfNull(filePath);
        ArgumentNullException.ThrowIfNull(relativePath);
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException($"Provenance file not found: {filePath}");
        }
        var (sha256, _) = ComputeFileHashes(filePath);
        var provenance = new ProvenanceReference
        {
            Path = relativePath,
            PredicateType = predicateType,
            ForArtifact = forArtifact,
            SignaturePath = signaturePath,
            BuilderId = builderId,
            SlsaLevel = slsaLevel,
            Sha256 = sha256
        };
        AddProvenance(provenance);
        // BUGFIX: record the real file size instead of the 0 AddProvenance wrote.
        AddChecksumForFile(relativePath, sha256, null, fileInfo.Length);
        return this;
    }

    /// <summary>
    /// Adds an attestation reference to the pack. The checksum entry is
    /// recorded with size 0 because the reference carries no size information.
    /// </summary>
    public ReleaseEvidencePackBuilder AddAttestation(AttestationReference attestation)
    {
        ArgumentNullException.ThrowIfNull(attestation);
        _attestations.Add(attestation);
        AddChecksumForFile(attestation.Path, attestation.Sha256, null, 0);
        _logger.LogDebug("Added attestation: {Path}", attestation.Path);
        return this;
    }

    /// <summary>
    /// Adds an attestation from a file path, computing its hash.
    /// </summary>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    public ReleaseEvidencePackBuilder AddAttestationFromFile(
        string filePath,
        string relativePath,
        string type,
        string? description = null)
    {
        ArgumentNullException.ThrowIfNull(filePath);
        ArgumentNullException.ThrowIfNull(relativePath);
        var fileInfo = new FileInfo(filePath);
        if (!fileInfo.Exists)
        {
            throw new FileNotFoundException($"Attestation file not found: {filePath}");
        }
        var (sha256, _) = ComputeFileHashes(filePath);
        var attestation = new AttestationReference
        {
            Path = relativePath,
            Type = type,
            Description = description,
            Sha256 = sha256
        };
        AddAttestation(attestation);
        // BUGFIX: record the real file size instead of the 0 AddAttestation wrote.
        AddChecksumForFile(relativePath, sha256, null, fileInfo.Length);
        return this;
    }

    /// <summary>
    /// Adds a Rekor proof entry to the pack.
    /// </summary>
    public ReleaseEvidencePackBuilder AddRekorProof(RekorProofEntry proof)
    {
        ArgumentNullException.ThrowIfNull(proof);
        _rekorProofs.Add(proof);
        _logger.LogDebug("Added Rekor proof: {Uuid}", proof.Uuid);
        return this;
    }

    /// <summary>
    /// Adds a file's checksum to the manifest. Re-adding the same path
    /// replaces the previous entry (last write wins).
    /// </summary>
    public ReleaseEvidencePackBuilder AddChecksumForFile(string path, string sha256, string? sha512, long size)
    {
        ArgumentNullException.ThrowIfNull(path);
        ArgumentNullException.ThrowIfNull(sha256);
        _checksums[path] = new ChecksumEntry
        {
            Sha256 = sha256,
            Sha512 = sha512,
            Size = size
        };
        return this;
    }

    /// <summary>
    /// Builds the Release Evidence Pack manifest.
    /// </summary>
    /// <returns>The immutable manifest with <c>ManifestHash</c> populated.</returns>
    /// <exception cref="InvalidOperationException">Required fields are missing.</exception>
    public ReleaseEvidencePackManifest Build()
    {
        ValidateRequiredFields();
        var manifest = new ReleaseEvidencePackManifest
        {
            BundleFormatVersion = BundleFormatVersion,
            ReleaseVersion = _releaseVersion,
            CreatedAt = _createdAt ?? DateTimeOffset.UtcNow,
            SourceCommit = _sourceCommit,
            SourceDateEpoch = _sourceDateEpoch,
            Artifacts = [.. _artifacts],
            Checksums = _checksums.ToImmutableDictionary(),
            Sboms = [.. _sboms],
            ProvenanceStatements = [.. _provenanceStatements],
            Attestations = [.. _attestations],
            RekorProofs = [.. _rekorProofs],
            SigningKeyFingerprint = _signingKeyFingerprint,
            RekorLogId = _rekorLogId
        };
        // Hash the serialized manifest while ManifestHash is still null (the
        // serializer omits null properties), then attach the hash.
        var manifestJson = JsonSerializer.Serialize(manifest, ManifestSerializerContext.Default.ReleaseEvidencePackManifest);
        var manifestHash = ComputeSha256(Encoding.UTF8.GetBytes(manifestJson));
        _logger.LogInformation(
            "Built evidence pack manifest for release {Version} with {ArtifactCount} artifacts",
            _releaseVersion,
            _artifacts.Count);
        return manifest with { ManifestHash = manifestHash };
    }

    // Collects all missing-field errors so the caller sees them in one pass.
    private void ValidateRequiredFields()
    {
        var errors = new List<string>();
        if (string.IsNullOrWhiteSpace(_releaseVersion))
        {
            errors.Add("Release version is required");
        }
        if (string.IsNullOrWhiteSpace(_sourceCommit))
        {
            errors.Add("Source commit is required");
        }
        if (_sourceDateEpoch <= 0)
        {
            errors.Add("SOURCE_DATE_EPOCH is required and must be positive");
        }
        if (string.IsNullOrWhiteSpace(_signingKeyFingerprint))
        {
            errors.Add("Signing key fingerprint is required");
        }
        if (_artifacts.Count == 0)
        {
            errors.Add("At least one artifact is required");
        }
        if (errors.Count > 0)
        {
            throw new InvalidOperationException(
                $"Cannot build evidence pack manifest: {string.Join("; ", errors)}");
        }
    }

    // Streams the file once, feeding both digests; returns lowercase hex.
    private static (string sha256, string sha512) ComputeFileHashes(string filePath)
    {
        using var stream = File.OpenRead(filePath);
        using var sha256 = SHA256.Create();
        using var sha512 = SHA512.Create();
        var buffer = new byte[8192];
        int bytesRead;
        while ((bytesRead = stream.Read(buffer, 0, buffer.Length)) > 0)
        {
            sha256.TransformBlock(buffer, 0, bytesRead, null, 0);
            sha512.TransformBlock(buffer, 0, bytesRead, null, 0);
        }
        sha256.TransformFinalBlock([], 0, 0);
        sha512.TransformFinalBlock([], 0, 0);
        return (
            Convert.ToHexString(sha256.Hash!).ToLowerInvariant(),
            Convert.ToHexString(sha512.Hash!).ToLowerInvariant()
        );
    }

    // SHA-256 of an in-memory buffer as lowercase hex.
    private static string ComputeSha256(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
/// <summary>
/// JSON serialization context for manifest (source-generated, AOT/trim safe).
/// Options: indented output, camelCase property names, null properties omitted.
/// NOTE: Build() hashes the JSON produced through this context into
/// manifestHash, so changing these options changes the hash of otherwise
/// identical manifests.
/// </summary>
[JsonSerializable(typeof(ReleaseEvidencePackManifest))]
[JsonSourceGenerationOptions(
WriteIndented = true,
PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
internal partial class ManifestSerializerContext : JsonSerializerContext
{
}

View File

@@ -0,0 +1,605 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under the BUSL-1.1 license.
using System.IO.Compression;
using System.Reflection;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.EvidencePack.Models;
namespace StellaOps.Attestor.EvidencePack;
/// <summary>
/// Serializes Release Evidence Packs to various output formats.
/// </summary>
public sealed class ReleaseEvidencePackSerializer
{
private readonly ILogger<ReleaseEvidencePackSerializer> _logger;

/// <summary>
/// Creates a serializer.
/// </summary>
/// <param name="logger">Logger; must not be null.</param>
public ReleaseEvidencePackSerializer(ILogger<ReleaseEvidencePackSerializer> logger)
{
    // Fail fast instead of an NRE on the first log call.
    ArgumentNullException.ThrowIfNull(logger);
    _logger = logger;
}
/// <summary>
/// Writes the evidence pack to a directory structure.
/// </summary>
/// <param name="manifest">Manifest describing the pack contents.</param>
/// <param name="outputPath">Directory under which the bundle directory is created.</param>
/// <param name="artifactsSourcePath">Directory containing artifact (and signature) files to copy.</param>
/// <param name="publicKeyPath">Cosign public key, embedded as cosign.pub.</param>
/// <param name="rekorPublicKeyPath">Optional Rekor public key; copied when it exists.</param>
/// <param name="cancellationToken">Cancellation token for file writes.</param>
public async Task SerializeToDirectoryAsync(
    ReleaseEvidencePackManifest manifest,
    string outputPath,
    string artifactsSourcePath,
    string publicKeyPath,
    string? rekorPublicKeyPath = null,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(manifest);
    ArgumentNullException.ThrowIfNull(outputPath);
    ArgumentNullException.ThrowIfNull(artifactsSourcePath);
    // Previously unchecked; a null key path surfaced as an opaque failure later.
    ArgumentNullException.ThrowIfNull(publicKeyPath);
    _logger.LogInformation("Serializing evidence pack to directory: {Path}", outputPath);
    // Create directory structure
    var bundleDir = Path.Combine(outputPath, $"stella-release-{manifest.ReleaseVersion}-evidence-pack");
    Directory.CreateDirectory(bundleDir);
    Directory.CreateDirectory(Path.Combine(bundleDir, "artifacts"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "checksums"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "sbom"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "provenance"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "attestations"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "rekor-proofs"));
    Directory.CreateDirectory(Path.Combine(bundleDir, "rekor-proofs", "log-entries"));
    // Copy public keys
    File.Copy(publicKeyPath, Path.Combine(bundleDir, "cosign.pub"), overwrite: true);
    if (!string.IsNullOrEmpty(rekorPublicKeyPath) && File.Exists(rekorPublicKeyPath))
    {
        File.Copy(rekorPublicKeyPath, Path.Combine(bundleDir, "rekor-public-key.pub"), overwrite: true);
    }
    // Copy artifacts from source. A missing source is logged, not fatal.
    foreach (var artifact in manifest.Artifacts)
    {
        var sourcePath = Path.Combine(artifactsSourcePath, Path.GetFileName(artifact.Path));
        var destPath = Path.Combine(bundleDir, artifact.Path);
        Directory.CreateDirectory(Path.GetDirectoryName(destPath)!);
        if (File.Exists(sourcePath))
        {
            File.Copy(sourcePath, destPath, overwrite: true);
            _logger.LogDebug("Copied artifact: {Path}", artifact.Path);
        }
        else
        {
            _logger.LogWarning("Artifact source not found: {Path}", sourcePath);
        }
        // Copy signature if exists
        if (!string.IsNullOrEmpty(artifact.SignaturePath))
        {
            var sigSource = Path.Combine(artifactsSourcePath, Path.GetFileName(artifact.SignaturePath));
            if (File.Exists(sigSource))
            {
                var sigDest = Path.Combine(bundleDir, artifact.SignaturePath);
                Directory.CreateDirectory(Path.GetDirectoryName(sigDest)!);
                File.Copy(sigSource, sigDest, overwrite: true);
            }
        }
    }
    // Generate checksums files
    await GenerateChecksumsFilesAsync(manifest, bundleDir, cancellationToken);
    // Write manifest
    var manifestJson = JsonSerializer.Serialize(manifest, ManifestSerializerContext.Default.ReleaseEvidencePackManifest);
    await File.WriteAllTextAsync(
        Path.Combine(bundleDir, "manifest.json"),
        manifestJson,
        cancellationToken);
    // Write VERIFY.md
    var verifyMd = GenerateVerifyMd(manifest);
    await File.WriteAllTextAsync(
        Path.Combine(bundleDir, "VERIFY.md"),
        verifyMd,
        cancellationToken);
    // Write verify.sh
    var verifyShContent = await LoadTemplateAsync("verify.sh.template");
    var verifyShPath = Path.Combine(bundleDir, "verify.sh");
    await File.WriteAllTextAsync(verifyShPath, verifyShContent, cancellationToken);
    // BUGFIX: `#if !WINDOWS` was a compile-time guard on a non-standard symbol;
    // a binary built without that symbol would call SetUnixFileMode on Windows
    // and throw PlatformNotSupportedException. Use a runtime OS check instead.
    if (!OperatingSystem.IsWindows())
    {
        // Make verify.sh executable on Unix (rwxr-xr-x).
        File.SetUnixFileMode(verifyShPath, UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
                                           UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                                           UnixFileMode.OtherRead | UnixFileMode.OtherExecute);
    }
    // Write verify.ps1
    var verifyPs1Content = await LoadTemplateAsync("verify.ps1.template");
    await File.WriteAllTextAsync(
        Path.Combine(bundleDir, "verify.ps1"),
        verifyPs1Content,
        cancellationToken);
    _logger.LogInformation("Evidence pack written to: {Path}", bundleDir);
}
/// <summary>
/// Writes the evidence pack to a directory structure without copying artifacts.
/// This overload is useful for testing and scenarios where artifacts are referenced but not bundled.
/// </summary>
/// <param name="manifest">Manifest describing the pack contents.</param>
/// <param name="outputPath">Directory the bundle files are written into directly.</param>
/// <param name="cancellationToken">Cancellation token for file writes.</param>
public async Task SerializeToDirectoryAsync(
    ReleaseEvidencePackManifest manifest,
    string outputPath,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(manifest);
    ArgumentNullException.ThrowIfNull(outputPath);
    _logger.LogInformation("Serializing evidence pack to directory (no artifact copy): {Path}", outputPath);
    // Create directory structure directly in outputPath for simpler test assertions
    Directory.CreateDirectory(outputPath);
    Directory.CreateDirectory(Path.Combine(outputPath, "artifacts"));
    Directory.CreateDirectory(Path.Combine(outputPath, "checksums"));
    Directory.CreateDirectory(Path.Combine(outputPath, "sbom"));
    Directory.CreateDirectory(Path.Combine(outputPath, "provenance"));
    Directory.CreateDirectory(Path.Combine(outputPath, "attestations"));
    Directory.CreateDirectory(Path.Combine(outputPath, "rekor-proofs"));
    Directory.CreateDirectory(Path.Combine(outputPath, "rekor-proofs", "log-entries"));
    // Write placeholder cosign.pub for testing
    await File.WriteAllTextAsync(
        Path.Combine(outputPath, "cosign.pub"),
        "-----BEGIN PUBLIC KEY-----\nMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEtest\n-----END PUBLIC KEY-----\n",
        cancellationToken);
    // Generate checksums files
    await GenerateChecksumsFilesAsync(manifest, outputPath, cancellationToken);
    // Write manifest
    var manifestJson = JsonSerializer.Serialize(manifest, ManifestSerializerContext.Default.ReleaseEvidencePackManifest);
    await File.WriteAllTextAsync(
        Path.Combine(outputPath, "manifest.json"),
        manifestJson,
        cancellationToken);
    // Write VERIFY.md
    var verifyMd = GenerateVerifyMd(manifest);
    await File.WriteAllTextAsync(
        Path.Combine(outputPath, "VERIFY.md"),
        verifyMd,
        cancellationToken);
    // Write verify.sh
    var verifyShContent = await LoadTemplateAsync("verify.sh.template");
    var verifyShPath = Path.Combine(outputPath, "verify.sh");
    await File.WriteAllTextAsync(verifyShPath, verifyShContent, cancellationToken);
    // BUGFIX: `#if !WINDOWS` was a compile-time guard on a non-standard symbol;
    // a binary built without that symbol would call SetUnixFileMode on Windows
    // and throw PlatformNotSupportedException. Use a runtime OS check instead.
    if (!OperatingSystem.IsWindows())
    {
        // Make verify.sh executable on Unix (rwxr-xr-x).
        File.SetUnixFileMode(verifyShPath, UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
                                           UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
                                           UnixFileMode.OtherRead | UnixFileMode.OtherExecute);
    }
    // Write verify.ps1
    var verifyPs1Content = await LoadTemplateAsync("verify.ps1.template");
    await File.WriteAllTextAsync(
        Path.Combine(outputPath, "verify.ps1"),
        verifyPs1Content,
        cancellationToken);
    _logger.LogInformation("Evidence pack written to: {Path}", outputPath);
}
/// <summary>
/// Writes the evidence pack as a .tar.gz archive.
/// </summary>
/// <remarks>
/// Stages the pack in a scratch directory, streams it out through gzip, and
/// always removes the scratch directory. The output stream is left open.
/// </remarks>
public async Task SerializeToTarGzAsync(
    ReleaseEvidencePackManifest manifest,
    Stream outputStream,
    string artifactsSourcePath,
    string publicKeyPath,
    string? rekorPublicKeyPath = null,
    CancellationToken cancellationToken = default)
{
    var scratchRoot = Path.Combine(Path.GetTempPath(), $"evidence-pack-{Guid.NewGuid():N}");
    try
    {
        await SerializeToDirectoryAsync(
            manifest,
            scratchRoot,
            artifactsSourcePath,
            publicKeyPath,
            rekorPublicKeyPath,
            cancellationToken);
        // The directory serializer creates a single bundle directory under the root.
        var stagedBundle = Directory.GetDirectories(scratchRoot).FirstOrDefault()
            ?? throw new InvalidOperationException("Bundle directory not created");
        await using var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true);
        await CreateTarFromDirectoryAsync(stagedBundle, gzip, cancellationToken);
        _logger.LogInformation("Evidence pack archived as tar.gz");
    }
    finally
    {
        if (Directory.Exists(scratchRoot))
        {
            Directory.Delete(scratchRoot, recursive: true);
        }
    }
}
/// <summary>
/// Writes the evidence pack as a .tar.gz archive without copying artifacts.
/// This overload is useful for testing and scenarios where artifacts are referenced but not bundled.
/// </summary>
public async Task SerializeToTarGzAsync(
    ReleaseEvidencePackManifest manifest,
    Stream outputStream,
    string bundleName,
    CancellationToken cancellationToken = default)
{
    // Stage under a unique scratch root so the archive gets a single
    // top-level directory named <bundleName>.
    var scratchRoot = Path.Combine(Path.GetTempPath(), $"evidence-pack-{Guid.NewGuid():N}");
    var stagedBundle = Path.Combine(scratchRoot, bundleName);
    try
    {
        await SerializeToDirectoryAsync(manifest, stagedBundle, cancellationToken);
        await using var gzip = new GZipStream(outputStream, CompressionLevel.Optimal, leaveOpen: true);
        await CreateTarFromDirectoryAsync(stagedBundle, gzip, cancellationToken);
        _logger.LogInformation("Evidence pack archived as tar.gz");
    }
    finally
    {
        if (Directory.Exists(scratchRoot))
        {
            Directory.Delete(scratchRoot, recursive: true);
        }
    }
}
/// <summary>
/// Writes the evidence pack as a .zip archive.
/// </summary>
/// <remarks>
/// Stages the pack in a scratch directory, zips it, and always removes the
/// scratch directory. The output stream is left open.
/// </remarks>
public async Task SerializeToZipAsync(
    ReleaseEvidencePackManifest manifest,
    Stream outputStream,
    string artifactsSourcePath,
    string publicKeyPath,
    string? rekorPublicKeyPath = null,
    CancellationToken cancellationToken = default)
{
    var scratchRoot = Path.Combine(Path.GetTempPath(), $"evidence-pack-{Guid.NewGuid():N}");
    try
    {
        await SerializeToDirectoryAsync(
            manifest,
            scratchRoot,
            artifactsSourcePath,
            publicKeyPath,
            rekorPublicKeyPath,
            cancellationToken);
        // The directory serializer creates a single bundle directory under the root.
        var stagedBundle = Directory.GetDirectories(scratchRoot).FirstOrDefault()
            ?? throw new InvalidOperationException("Bundle directory not created");
        using var archive = new ZipArchive(outputStream, ZipArchiveMode.Create, leaveOpen: true);
        await AddDirectoryToZipAsync(archive, stagedBundle, Path.GetFileName(stagedBundle), cancellationToken);
        _logger.LogInformation("Evidence pack archived as zip");
    }
    finally
    {
        if (Directory.Exists(scratchRoot))
        {
            Directory.Delete(scratchRoot, recursive: true);
        }
    }
}
/// <summary>
/// Writes the evidence pack as a .zip archive without copying artifacts.
/// This overload is useful for testing and scenarios where artifacts are referenced but not bundled.
/// </summary>
public async Task SerializeToZipAsync(
    ReleaseEvidencePackManifest manifest,
    Stream outputStream,
    string bundleName,
    CancellationToken cancellationToken = default)
{
    // Stage under a unique scratch root so the archive gets a single
    // top-level directory named <bundleName>.
    var scratchRoot = Path.Combine(Path.GetTempPath(), $"evidence-pack-{Guid.NewGuid():N}");
    var stagedBundle = Path.Combine(scratchRoot, bundleName);
    try
    {
        await SerializeToDirectoryAsync(manifest, stagedBundle, cancellationToken);
        using var archive = new ZipArchive(outputStream, ZipArchiveMode.Create, leaveOpen: true);
        await AddDirectoryToZipAsync(archive, stagedBundle, bundleName, cancellationToken);
        _logger.LogInformation("Evidence pack archived as zip");
    }
    finally
    {
        if (Directory.Exists(scratchRoot))
        {
            Directory.Delete(scratchRoot, recursive: true);
        }
    }
}
/// <summary>
/// Writes SHA256SUMS (and SHA512SUMS when any artifact has a SHA-512 digest)
/// into the bundle's checksums directory, in coreutils `shaNNNsum -c` format.
/// </summary>
private static async Task GenerateChecksumsFilesAsync(
    ReleaseEvidencePackManifest manifest,
    string bundleDir,
    CancellationToken cancellationToken)
{
    // BUGFIX: coreutils checksum files require "<hash><sp><sp><path>" records
    // terminated by LF. AppendLine emits Environment.NewLine (CRLF on Windows),
    // which makes `sha256sum -c` reject the file on Linux — the exact command
    // VERIFY.md tells users to run. Emit the separator and '\n' explicitly.
    var sha256Lines = new StringBuilder();
    var sha512Lines = new StringBuilder();
    foreach (var artifact in manifest.Artifacts)
    {
        sha256Lines.Append(artifact.Sha256).Append("  ").Append(artifact.Path).Append('\n');
        if (!string.IsNullOrEmpty(artifact.Sha512))
        {
            sha512Lines.Append(artifact.Sha512).Append("  ").Append(artifact.Path).Append('\n');
        }
    }
    await File.WriteAllTextAsync(
        Path.Combine(bundleDir, "checksums", "SHA256SUMS"),
        sha256Lines.ToString(),
        cancellationToken);
    if (sha512Lines.Length > 0)
    {
        await File.WriteAllTextAsync(
            Path.Combine(bundleDir, "checksums", "SHA512SUMS"),
            sha512Lines.ToString(),
            cancellationToken);
    }
}
/// <summary>
/// Renders the human-readable VERIFY.md guide for an evidence pack from the
/// manifest: quick/manual verification steps, provenance and Rekor sections,
/// a bundle-contents table, signing identity, and build-reproducibility notes.
/// </summary>
/// <param name="manifest">Manifest describing the pack being generated.</param>
/// <returns>The complete markdown document as a string.</returns>
private string GenerateVerifyMd(ReleaseEvidencePackManifest manifest)
{
    var sb = new StringBuilder();
    // --- Header ---
    sb.AppendLine($"# Stella Ops Release {manifest.ReleaseVersion} - Verification Guide");
    sb.AppendLine();
    sb.AppendLine("This bundle contains everything needed to verify the authenticity and integrity");
    sb.AppendLine($"of Stella Ops release {manifest.ReleaseVersion} in an air-gapped environment.");
    sb.AppendLine();
    // --- Quick verification (delegates to the bundled verify.sh) ---
    sb.AppendLine("## Quick Verification (requires cosign)");
    sb.AppendLine();
    sb.AppendLine("```bash");
    sb.AppendLine("./verify.sh");
    sb.AppendLine("```");
    sb.AppendLine();
    // --- Manual verification steps ---
    sb.AppendLine("## Manual Verification (no external tools)");
    sb.AppendLine();
    sb.AppendLine("### 1. Verify Checksums");
    sb.AppendLine("```bash");
    sb.AppendLine("cd artifacts/");
    sb.AppendLine("sha256sum -c ../checksums/SHA256SUMS");
    sb.AppendLine("```");
    sb.AppendLine();
    sb.AppendLine("### 2. Verify Signatures (requires cosign)");
    sb.AppendLine("```bash");
    sb.AppendLine("cosign verify-blob \\");
    sb.AppendLine("  --key cosign.pub \\");
    sb.AppendLine("  --signature checksums/SHA256SUMS.sig \\");
    sb.AppendLine("  checksums/SHA256SUMS");
    sb.AppendLine("```");
    sb.AppendLine();
    sb.AppendLine("### 3. Verify Provenance");
    // The bash fence is opened unconditionally; when there are no provenance
    // statements the fenced block renders empty.
    sb.AppendLine("```bash");
    if (manifest.ProvenanceStatements.Length > 0)
    {
        // Only the first statement is used as the worked example.
        var firstProv = manifest.ProvenanceStatements[0];
        sb.AppendLine("cosign verify-blob \\");
        sb.AppendLine("  --key cosign.pub \\");
        // Fall back to "<path>.sig" when no explicit signature path was recorded.
        sb.AppendLine($"  --signature {firstProv.SignaturePath ?? firstProv.Path + ".sig"} \\");
        sb.AppendLine($"  {firstProv.Path}");
        sb.AppendLine();
        sb.AppendLine("# Inspect provenance contents:");
        sb.AppendLine($"cat {firstProv.Path} | jq .");
    }
    sb.AppendLine("```");
    sb.AppendLine();
    // --- Rekor transparency log section (only when proofs were bundled) ---
    sb.AppendLine("## Transparency Log Verification (requires network)");
    sb.AppendLine();
    if (manifest.RekorProofs.Length > 0)
    {
        sb.AppendLine("The Rekor transparency log entries for this release can be verified:");
        sb.AppendLine();
        sb.AppendLine("```bash");
        var firstArtifact = manifest.Artifacts.FirstOrDefault();
        if (firstArtifact != null)
        {
            sb.AppendLine($"rekor-cli verify --artifact artifacts/{Path.GetFileName(firstArtifact.Path)} \\");
            sb.AppendLine($"  --signature artifacts/{Path.GetFileName(firstArtifact.Path)}.sig \\");
            sb.AppendLine("  --public-key cosign.pub");
        }
        sb.AppendLine("```");
        sb.AppendLine();
        sb.AppendLine("Rekor log entries (UUIDs):");
        foreach (var proof in manifest.RekorProofs)
        {
            sb.AppendLine($"- `{proof.Uuid}` (index: {proof.LogIndex})");
        }
    }
    else
    {
        sb.AppendLine("No Rekor proofs included in this bundle.");
    }
    sb.AppendLine();
    // --- Bundle contents table ---
    sb.AppendLine("## Bundle Contents");
    sb.AppendLine();
    sb.AppendLine("| File | SHA-256 | Description |");
    sb.AppendLine("|------|---------|-------------|");
    foreach (var artifact in manifest.Artifacts)
    {
        // Assumes Sha256 holds a full 64-char hex digest; a value shorter than
        // 16 chars would throw here - TODO confirm upstream guarantees length.
        sb.AppendLine($"| `{artifact.Path}` | `{artifact.Sha256[..16]}...` | {artifact.Name} ({artifact.Platform}) |");
    }
    sb.AppendLine();
    // --- Signing identity ---
    sb.AppendLine("## Signing Identity");
    sb.AppendLine();
    sb.AppendLine($"- **Public Key Fingerprint:** `{manifest.SigningKeyFingerprint}`");
    sb.AppendLine("- **Signing Method:** Cosign (keyless via Fulcio / key-based)");
    if (!string.IsNullOrEmpty(manifest.RekorLogId))
    {
        sb.AppendLine($"- **Rekor Log ID:** `{manifest.RekorLogId}`");
    }
    sb.AppendLine();
    // --- Reproducibility instructions ---
    sb.AppendLine("## Build Reproducibility");
    sb.AppendLine();
    sb.AppendLine($"This release was built with `SOURCE_DATE_EPOCH={manifest.SourceDateEpoch}`.");
    sb.AppendLine("To reproduce the build:");
    sb.AppendLine();
    sb.AppendLine("```bash");
    sb.AppendLine($"git checkout {manifest.SourceCommit}");
    sb.AppendLine($"export SOURCE_DATE_EPOCH={manifest.SourceDateEpoch}");
    sb.AppendLine("make release");
    sb.AppendLine("```");
    sb.AppendLine();
    // --- Footer ---
    sb.AppendLine("---");
    sb.AppendLine($"Generated: {manifest.CreatedAt:O}");
    sb.AppendLine("Stella Ops Release Engineering");
    return sb.ToString();
}
/// <summary>
/// Reads an embedded template (under the Templates folder) as text.
/// </summary>
/// <param name="templateName">File name of the template, e.g. "verify.sh.template".</param>
/// <returns>The template contents.</returns>
/// <exception cref="InvalidOperationException">The resource does not exist in the assembly.</exception>
private static async Task<string> LoadTemplateAsync(string templateName)
{
    var resourceName = $"StellaOps.Attestor.EvidencePack.Templates.{templateName}";
    await using var stream = Assembly.GetExecutingAssembly().GetManifestResourceStream(resourceName)
        ?? throw new InvalidOperationException($"Template not found: {templateName}");
    using var reader = new StreamReader(stream);
    return await reader.ReadToEndAsync();
}
/// <summary>
/// Writes a minimal POSIX ustar archive of <paramref name="sourceDir"/> to
/// <paramref name="outputStream"/>: one header + content block (padded to a
/// 512-byte boundary) per file, terminated by two zero-filled blocks.
/// Entry names are prefixed with the directory's own name and use '/' separators.
/// </summary>
private static async Task CreateTarFromDirectoryAsync(
    string sourceDir,
    Stream outputStream,
    CancellationToken cancellationToken)
{
    var rootName = Path.GetFileName(sourceDir);
    foreach (var filePath in Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories))
    {
        cancellationToken.ThrowIfCancellationRequested();
        var entryName = $"{rootName}/{Path.GetRelativePath(sourceDir, filePath).Replace('\\', '/')}";
        var content = await File.ReadAllBytesAsync(filePath, cancellationToken);
        // Header, then raw content, then zero padding up to the next 512-byte boundary.
        await outputStream.WriteAsync(CreateTarHeader(entryName, content.LongLength), cancellationToken);
        await outputStream.WriteAsync(content, cancellationToken);
        var remainder = (int)(content.LongLength % 512);
        if (remainder != 0)
        {
            await outputStream.WriteAsync(new byte[512 - remainder], cancellationToken);
        }
    }
    // Archive terminator: two consecutive zero-filled 512-byte blocks.
    await outputStream.WriteAsync(new byte[1024], cancellationToken);
}
/// <summary>
/// Builds a 512-byte POSIX ustar header for a regular file with mode 0644,
/// uid/gid 0 and mtime 0 (deterministic output for reproducible bundles).
/// </summary>
/// <param name="name">Entry path inside the archive ('/' separators).</param>
/// <param name="size">File size in bytes (encoded as octal).</param>
/// <exception cref="ArgumentException">The path cannot be represented in a ustar header.</exception>
private static byte[] CreateTarHeader(string name, long size)
{
    var header = new byte[512];
    // Fix: paths longer than 100 bytes were silently truncated, corrupting the
    // archive. Split them at a '/' into the ustar prefix field (offset 345,
    // <= 155 bytes) + name field (<= 100 bytes); reject paths that cannot be split.
    var prefix = string.Empty;
    if (name.Length > 100)
    {
        // First '/' at or after (length - 101) keeps the name part <= 100 bytes.
        var split = name.IndexOf('/', Math.Max(0, name.Length - 101));
        if (split < 0 || split > 155 || name.Length - split - 1 > 100)
        {
            throw new ArgumentException($"Path too long for ustar header: {name}", nameof(name));
        }
        prefix = name[..split];
        name = name[(split + 1)..];
    }
    // Name (100 bytes)
    var nameBytes = Encoding.ASCII.GetBytes(name);
    Array.Copy(nameBytes, 0, header, 0, nameBytes.Length);
    // Mode (8 bytes) - 0644
    Encoding.ASCII.GetBytes("0000644\0").CopyTo(header, 100);
    // UID (8 bytes) - 0
    Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 108);
    // GID (8 bytes) - 0
    Encoding.ASCII.GetBytes("0000000\0").CopyTo(header, 116);
    // Size (12 bytes) - 11 octal digits + NUL
    var sizeStr = Convert.ToString(size, 8).PadLeft(11, '0') + "\0";
    Encoding.ASCII.GetBytes(sizeStr).CopyTo(header, 124);
    // Mtime (12 bytes) - fixed at epoch 0 for deterministic archives
    Encoding.ASCII.GetBytes("00000000000\0").CopyTo(header, 136);
    // Checksum field must hold 8 spaces while the checksum is computed
    Encoding.ASCII.GetBytes("        ").CopyTo(header, 148);
    // Type flag (1 byte) - regular file
    header[156] = (byte)'0';
    // USTAR magic + version
    Encoding.ASCII.GetBytes("ustar\0").CopyTo(header, 257);
    Encoding.ASCII.GetBytes("00").CopyTo(header, 263);
    // Prefix (155 bytes at offset 345) - only set for long paths; must be
    // written before the checksum is computed.
    if (prefix.Length > 0)
    {
        var prefixBytes = Encoding.ASCII.GetBytes(prefix);
        Array.Copy(prefixBytes, 0, header, 345, prefixBytes.Length);
    }
    // Checksum: unsigned sum of all 512 bytes, stored as 6 octal digits + NUL + space.
    var checksum = header.Sum(b => b);
    var checksumStr = Convert.ToString(checksum, 8).PadLeft(6, '0') + "\0 ";
    Encoding.ASCII.GetBytes(checksumStr).CopyTo(header, 148);
    return header;
}
/// <summary>
/// Recursively copies every file under <paramref name="sourceDir"/> into the
/// zip archive. Entry names are "<paramref name="entryPrefix"/>/&lt;relative path&gt;"
/// with '/' separators, compressed at <see cref="CompressionLevel.Optimal"/>.
/// </summary>
private static async Task AddDirectoryToZipAsync(
    ZipArchive archive,
    string sourceDir,
    string entryPrefix,
    CancellationToken cancellationToken)
{
    foreach (var filePath in Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories))
    {
        cancellationToken.ThrowIfCancellationRequested();
        var relative = Path.GetRelativePath(sourceDir, filePath).Replace('\\', '/');
        var entry = archive.CreateEntry($"{entryPrefix}/{relative}", CompressionLevel.Optimal);
        await using var destination = entry.Open();
        await using var source = File.OpenRead(filePath);
        await source.CopyToAsync(destination, cancellationToken);
    }
}
}

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Attestor.EvidencePack</RootNamespace>
    <Description>Release Evidence Pack builder for customer-facing verification bundles with offline support.</Description>
  </PropertyGroup>
  <ItemGroup>
    <!-- No Version attributes here - presumably central package management
         (Directory.Packages.props); confirm. -->
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
    <!-- NOTE(review): System.IO.Compression ships with the shared framework on
         modern .NET; confirm this package reference is still needed. -->
    <PackageReference Include="System.IO.Compression" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Attestor.Bundle\StellaOps.Attestor.Bundle.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- Embedded as "StellaOps.Attestor.EvidencePack.Templates.<file>",
         the resource names resolved by LoadTemplateAsync. -->
    <EmbeddedResource Include="Templates\VERIFY.md.template" />
    <EmbeddedResource Include="Templates\verify.sh.template" />
    <EmbeddedResource Include="Templates\verify.ps1.template" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,222 @@
# Stella Ops Release {{VERSION}} - Verification Guide
This bundle contains everything needed to verify the authenticity and integrity
of Stella Ops release {{VERSION}} in an air-gapped environment.
## Quick Verification (requires cosign)
```bash
./verify.sh
```
Or on Windows (PowerShell 7+):
```powershell
./verify.ps1
```
## Manual Verification
### 1. Verify Checksums
Verify all artifacts match their expected checksums:
```bash
cd artifacts/
sha256sum -c ../checksums/SHA256SUMS
```
On Windows:
```powershell
Get-Content ..\checksums\SHA256SUMS | ForEach-Object {
$parts = $_ -split '\s+', 2
$expected = $parts[0]
$file = $parts[1]
$computed = (Get-FileHash -Path $file -Algorithm SHA256).Hash.ToLower()
if ($computed -eq $expected) {
Write-Host "[PASS] $file" -ForegroundColor Green
} else {
Write-Host "[FAIL] $file" -ForegroundColor Red
}
}
```
### 2. Verify Checksums Signature (requires cosign)
Verify that the checksums file was signed by Stella Ops:
```bash
cosign verify-blob \
--key cosign.pub \
--signature checksums/SHA256SUMS.sig \
checksums/SHA256SUMS
```
### 3. Verify Individual Artifact Signatures
```bash
# For each artifact
cosign verify-blob \
--key cosign.pub \
--signature artifacts/stella-{{VERSION}}-linux-x64.tar.gz.sig \
artifacts/stella-{{VERSION}}-linux-x64.tar.gz
```
### 4. Verify Provenance (SLSA)
Verify that the SLSA provenance statement was signed and inspect its contents:
```bash
# Verify signature
cosign verify-blob \
--key cosign.pub \
--signature provenance/stella-cli.slsa.intoto.jsonl.sig \
provenance/stella-cli.slsa.intoto.jsonl
# Inspect provenance contents
cat provenance/stella-cli.slsa.intoto.jsonl | jq .
```
The provenance should show:
- **Builder ID**: `https://ci.stella-ops.org/builder/v1`
- **Source commit**: `{{SOURCE_COMMIT}}`
- **Build timestamp**: Matches release time
- **Materials**: Lists all build inputs with digests
### 5. Verify SBOMs
```bash
# Verify SBOM signature
cosign verify-blob \
--key cosign.pub \
--signature sbom/stella-cli.cdx.json.sig \
sbom/stella-cli.cdx.json
# Inspect SBOM (requires jq or any JSON viewer)
cat sbom/stella-cli.cdx.json | jq '.components | length'
```
## Transparency Log Verification (requires network)
If you have network access, you can verify the artifacts were recorded in the
Rekor transparency log:
```bash
rekor-cli verify \
--artifact artifacts/stella-{{VERSION}}-linux-x64.tar.gz \
--signature artifacts/stella-{{VERSION}}-linux-x64.tar.gz.sig \
--public-key cosign.pub
```
### Rekor Log Entries
The following Rekor log entries are associated with this release:
{{REKOR_ENTRIES}}
You can look up any entry:
```bash
rekor-cli get --uuid <UUID>
```
## Offline Rekor Proof Verification
If Rekor proofs are included in this bundle (in `rekor-proofs/`), you can verify
Merkle inclusion proofs without network access:
```bash
# Verify inclusion proof (advanced)
# See: https://docs.sigstore.dev/verification/offline/
```
## Bundle Contents
| Path | Description |
|------|-------------|
| `cosign.pub` | Stella Ops signing public key |
| `rekor-public-key.pub` | Rekor transparency log public key (if included) |
| `checksums/SHA256SUMS` | SHA-256 checksums for all artifacts |
| `checksums/SHA256SUMS.sig` | Cosign signature of checksums |
| `checksums/SHA512SUMS` | SHA-512 checksums (optional) |
| `artifacts/` | Release binaries and archives |
| `sbom/` | Software Bill of Materials (CycloneDX) |
| `provenance/` | SLSA provenance statements (in-toto) |
| `attestations/` | Additional DSSE attestations |
| `rekor-proofs/` | Transparency log inclusion proofs |
| `manifest.json` | Bundle manifest with all file hashes |
## Signing Identity
| Property | Value |
|----------|-------|
| **Signing Method** | Cosign (keyless via Fulcio / key-based) |
| **Public Key Fingerprint** | `{{KEY_FINGERPRINT}}` |
| **Rekor Log ID** | `{{REKOR_LOG_ID}}` |
| **Certificate OIDC Issuer** | `https://oauth2.sigstore.dev/auth` |
| **Certificate Identity** | `https://ci.stella-ops.org` |
## Build Reproducibility
This release was built with deterministic settings:
| Property | Value |
|----------|-------|
| **SOURCE_DATE_EPOCH** | `{{SOURCE_DATE_EPOCH}}` |
| **Source Commit** | `{{SOURCE_COMMIT}}` |
| **.NET SDK Version** | See `global.json` |
| **Build Configuration** | Release |
To reproduce the build:
```bash
git clone https://git.stella-ops.org/stella-ops.org/git.stella-ops.org.git
cd git.stella-ops.org
git checkout {{SOURCE_COMMIT}}
export SOURCE_DATE_EPOCH={{SOURCE_DATE_EPOCH}}
make release
# Compare checksums
sha256sum dist/* | diff - <(cat path/to/evidence-pack/checksums/SHA256SUMS)
```
## Troubleshooting
### "cosign: command not found"
Install cosign:
- macOS: `brew install cosign`
- Linux: Download from https://github.com/sigstore/cosign/releases
- Windows: Download from https://github.com/sigstore/cosign/releases
### "Error: no matching signatures"
Ensure you're using the `cosign.pub` file from this bundle, not a different key.
### Checksum mismatch
If checksums don't match:
1. Re-download the artifact
2. Verify the download completed successfully
3. Check for file corruption during transfer
### Certificate verification failed
For keyless-signed artifacts, you may need to specify the expected identity:
```bash
cosign verify-blob \
--certificate-identity "https://ci.stella-ops.org" \
--certificate-oidc-issuer "https://oauth2.sigstore.dev/auth" \
--signature artifact.sig \
artifact
```
---
**Generated:** {{TIMESTAMP}}
**Bundle Format Version:** {{BUNDLE_VERSION}}
Stella Ops Release Engineering
https://stella-ops.org

View File

@@ -0,0 +1,384 @@
#Requires -Version 7.0
<#
.SYNOPSIS
Stella Ops Release Evidence Pack Verifier (PowerShell)
.DESCRIPTION
Verifies release integrity offline using PowerShell and cosign.
.PARAMETER SkipRekor
Skip Rekor proof verification (default in offline mode)
.PARAMETER RequireRekor
Require Rekor proof verification
.PARAMETER Artifact
Verify only the specified artifact
.PARAMETER Verbose
Show detailed output
.PARAMETER Json
Output results as JSON
.EXAMPLE
./verify.ps1
Verify all artifacts with default settings
.EXAMPLE
./verify.ps1 -Artifact "artifacts/stella-1.0.0-linux-x64.tar.gz"
Verify only the specified artifact
#>
[CmdletBinding()]
param(
    # NOTE(review): SkipRekor/RequireRekor are accepted for CLI parity with
    # verify.sh but neither switch is read anywhere in this script - confirm.
    [switch]$SkipRekor = $true,
    [switch]$RequireRekor,
    # Bundle-relative path (e.g. "artifacts/stella-1.0.0-linux-x64.tar.gz");
    # when set, only this artifact is checksummed/signature-verified.
    [string]$Artifact,
    # Emit a machine-readable JSON result document and suppress console lines.
    [switch]$Json
)
$ErrorActionPreference = 'Stop'
# Configuration - all paths are resolved relative to the bundle root (the
# directory containing this script).
$ScriptDir = $PSScriptRoot
$CosignPub = Join-Path $ScriptDir "cosign.pub"
$ChecksumsDir = Join-Path $ScriptDir "checksums"
$ArtifactsDir = Join-Path $ScriptDir "artifacts"
$ProvenanceDir = Join-Path $ScriptDir "provenance"
$SbomDir = Join-Path $ScriptDir "sbom"
# Results tracking - counters incremented by the Test-* functions, rendered by
# Write-Summary and used to pick the process exit code.
$Results = @{
    Checksums = @{ Passed = 0; Failed = 0 }
    Signatures = @{ Passed = 0; Failed = 0 }
    Provenance = @{ Passed = 0; Failed = 0 }
}
function Write-Pass {
    # Green [PASS] line; suppressed entirely in -Json mode so stdout stays
    # machine-parseable.
    param([string]$Message)
    if ($Json) { return }
    Write-Host "[PASS] " -ForegroundColor Green -NoNewline
    Write-Host $Message
}
function Write-Fail {
    # Red [FAIL] line; suppressed in -Json mode.
    param([string]$Message)
    if ($Json) { return }
    Write-Host "[FAIL] " -ForegroundColor Red -NoNewline
    Write-Host $Message
}
function Write-Warn {
    # Yellow [WARN] line; suppressed in -Json mode.
    param([string]$Message)
    if ($Json) { return }
    Write-Host "[WARN] " -ForegroundColor Yellow -NoNewline
    Write-Host $Message
}
function Test-CosignAvailable {
    # Returns $true when the cosign CLI is resolvable on PATH; otherwise warns
    # and returns $false so callers can degrade to checksum-only verification.
    if (Get-Command cosign -ErrorAction SilentlyContinue) {
        return $true
    }
    Write-Warn "cosign not found - signature verification will be skipped"
    Write-Warn "Install cosign: https://docs.sigstore.dev/cosign/installation/"
    return $false
}
function Get-FileHashSha256 {
    # Lower-case hex SHA-256 of the file at $Path, matching the digest format
    # recorded in SHA256SUMS.
    param([string]$Path)
    return (Get-FileHash -Path $Path -Algorithm SHA256).Hash.ToLower()
}
function Test-Checksums {
    # Recomputes the SHA-256 of every file listed in checksums/SHA256SUMS and
    # compares it with the recorded digest (honoring the -Artifact filter).
    # Increments $Results.Checksums; returns $true only when everything matched.
    Write-Verbose "Verifying artifact checksums..."
    $sha256sumsPath = Join-Path $ChecksumsDir "SHA256SUMS"
    if (-not (Test-Path $sha256sumsPath)) {
        Write-Fail "SHA256SUMS file not found"
        return $false
    }
    $failed = $false
    $lines = Get-Content $sha256sumsPath
    foreach ($line in $lines) {
        if ([string]::IsNullOrWhiteSpace($line)) { continue }
        # Each line is "<hash>  <path>"; split on the FIRST whitespace run only
        # so later spaces in the path survive.
        $parts = $line -split '\s+', 2
        $expectedHash = $parts[0]
        $filePath = $parts[1]
        # Skip if single artifact specified
        if ($Artifact -and $filePath -ne $Artifact) { continue }
        # NOTE(review): entries are resolved against the bundle root; confirm
        # SHA256SUMS paths are bundle-root-relative (e.g. "artifacts/...").
        $fullPath = Join-Path $ScriptDir $filePath
        if (-not (Test-Path $fullPath)) {
            Write-Fail "File not found: $filePath"
            $Results.Checksums.Failed++
            $failed = $true
            continue
        }
        $computedHash = Get-FileHashSha256 -Path $fullPath
        # String -eq is case-insensitive in PowerShell, so hash casing is moot.
        if ($computedHash -eq $expectedHash) {
            Write-Pass "Checksum verified: $filePath"
            $Results.Checksums.Passed++
        }
        else {
            Write-Fail "Checksum mismatch: $filePath"
            Write-Verbose " Expected: $expectedHash"
            Write-Verbose " Got: $computedHash"
            $Results.Checksums.Failed++
            $failed = $true
        }
    }
    return -not $failed
}
function Test-ChecksumsSignature {
    # Verifies the cosign signature over checksums/SHA256SUMS. Missing cosign
    # or a missing .sig file is a soft skip (returns $true); only a failed
    # cosign verification counts as a failure.
    if (-not (Test-CosignAvailable)) {
        Write-Warn "Skipping checksums signature verification (cosign not available)"
        return $true
    }
    Write-Verbose "Verifying SHA256SUMS signature..."
    $sha256sumsPath = Join-Path $ChecksumsDir "SHA256SUMS"
    $sigPath = Join-Path $ChecksumsDir "SHA256SUMS.sig"
    if (-not (Test-Path $sigPath)) {
        Write-Warn "SHA256SUMS.sig not found - skipping signature verification"
        return $true
    }
    # cosign's own output is captured (and discarded); success is judged solely
    # by its exit code.
    $result = & cosign verify-blob `
        --key $CosignPub `
        --signature $sigPath `
        $sha256sumsPath 2>&1
    if ($LASTEXITCODE -eq 0) {
        Write-Pass "SHA256SUMS signature verified"
        $Results.Signatures.Passed++
        return $true
    }
    else {
        Write-Fail "SHA256SUMS signature verification failed"
        $Results.Signatures.Failed++
        return $false
    }
}
function Test-ArtifactSignatures {
    # Verifies the cosign signature of every artifact in artifacts/ (or only
    # the one named by -Artifact). Increments $Results.Signatures; returns
    # $true only when no signature check failed.
    if (-not (Test-CosignAvailable)) {
        Write-Warn "Skipping artifact signature verification (cosign not available)"
        return $true
    }
    Write-Verbose "Verifying artifact signatures..."
    $failed = $false
    # Fix: use a foreach statement instead of a ForEach-Object pipeline. The
    # original set $script:failed inside the script block, which never updated
    # the function-local $failed, so the function returned $true even when a
    # signature verification failed.
    $candidates = Get-ChildItem -Path $ArtifactsDir -File | Where-Object {
        $_.Extension -notin @('.sig', '.cert')
    }
    foreach ($item in $candidates) {
        $artifactPath = $item.FullName
        $artifactName = $item.Name
        # Skip if single artifact specified (paths are bundle-relative)
        if ($Artifact -and "artifacts/$artifactName" -ne $Artifact) { continue }
        $sigPath = "$artifactPath.sig"
        if (-not (Test-Path $sigPath)) {
            Write-Warn "No signature for: $artifactName"
            continue
        }
        $result = & cosign verify-blob `
            --key $CosignPub `
            --signature $sigPath `
            $artifactPath 2>&1
        if ($LASTEXITCODE -eq 0) {
            Write-Pass "Signature verified: $artifactName"
            $Results.Signatures.Passed++
        }
        else {
            Write-Fail "Signature verification failed: $artifactName"
            $Results.Signatures.Failed++
            $failed = $true
        }
    }
    return -not $failed
}
function Test-Provenance {
    # Verifies the cosign signature of every *.intoto.jsonl provenance
    # statement. Increments $Results.Provenance; returns $true only when no
    # verification failed.
    if (-not (Test-CosignAvailable)) {
        Write-Warn "Skipping provenance verification (cosign not available)"
        return $true
    }
    Write-Verbose "Verifying provenance statements..."
    if (-not (Test-Path $ProvenanceDir)) {
        Write-Warn "No provenance statements found"
        return $true
    }
    $failed = $false
    # Fix: use a foreach statement instead of a ForEach-Object pipeline. The
    # original set $script:failed inside the script block, which never updated
    # the function-local $failed, so the function returned $true even after
    # failures.
    foreach ($prov in Get-ChildItem -Path $ProvenanceDir -Filter "*.intoto.jsonl") {
        $provPath = $prov.FullName
        $provName = $prov.Name
        $sigPath = "$provPath.sig"
        if (-not (Test-Path $sigPath)) {
            Write-Warn "No signature for provenance: $provName"
            continue
        }
        $result = & cosign verify-blob `
            --key $CosignPub `
            --signature $sigPath `
            $provPath 2>&1
        if ($LASTEXITCODE -eq 0) {
            Write-Pass "Provenance verified: $provName"
            $Results.Provenance.Passed++
        }
        else {
            Write-Fail "Provenance verification failed: $provName"
            $Results.Provenance.Failed++
            $failed = $true
        }
    }
    return -not $failed
}
function Test-SbomSignatures {
    # Best-effort SBOM signature verification. Results are tallied into
    # $Results.Signatures but the caller treats this check as non-fatal; the
    # function returns nothing meaningful.
    if (-not (Test-CosignAvailable)) { return }
    Write-Verbose "Verifying SBOM signatures..."
    if (-not (Test-Path $SbomDir)) {
        Write-Warn "No SBOMs found"
        return
    }
    foreach ($sbom in Get-ChildItem -Path $SbomDir -Filter "*.cdx.json") {
        $sbomPath = $sbom.FullName
        $sbomName = $sbom.Name
        $sigPath = "$sbomPath.sig"
        if (-not (Test-Path $sigPath)) {
            Write-Warn "No signature for SBOM: $sbomName"
            continue
        }
        $result = & cosign verify-blob `
            --key $CosignPub `
            --signature $sigPath `
            $sbomPath 2>&1
        if ($LASTEXITCODE -eq 0) {
            Write-Pass "SBOM signature verified: $sbomName"
            $Results.Signatures.Passed++
        }
        else {
            Write-Fail "SBOM signature verification failed: $sbomName"
            $Results.Signatures.Failed++
        }
    }
}
function Write-Summary {
    # Renders the final result: a JSON document in -Json mode, otherwise a
    # colored console summary of the $Results counters.
    if ($Json) {
        # Overall status is "fail" when any category recorded a failure.
        $status = "pass"
        if ($Results.Checksums.Failed -gt 0) { $status = "fail" }
        if ($Results.Signatures.Failed -gt 0) { $status = "fail" }
        if ($Results.Provenance.Failed -gt 0) { $status = "fail" }
        @{
            status = $status
            checksums = $Results.Checksums
            signatures = $Results.Signatures
            provenance = $Results.Provenance
        } | ConvertTo-Json -Depth 3
        return
    }
    Write-Host ""
    Write-Host "========================================"
    Write-Host " VERIFICATION SUMMARY"
    Write-Host "========================================"
    Write-Host "Checksums: " -NoNewline
    Write-Host "$($Results.Checksums.Passed) passed" -ForegroundColor Green -NoNewline
    Write-Host ", " -NoNewline
    Write-Host "$($Results.Checksums.Failed) failed" -ForegroundColor Red
    Write-Host "Signatures: " -NoNewline
    Write-Host "$($Results.Signatures.Passed) passed" -ForegroundColor Green -NoNewline
    Write-Host ", " -NoNewline
    Write-Host "$($Results.Signatures.Failed) failed" -ForegroundColor Red
    Write-Host "Provenance: " -NoNewline
    Write-Host "$($Results.Provenance.Passed) passed" -ForegroundColor Green -NoNewline
    Write-Host ", " -NoNewline
    Write-Host "$($Results.Provenance.Failed) failed" -ForegroundColor Red
    Write-Host "========================================"
    if ($Results.Checksums.Failed -eq 0 -and
        $Results.Signatures.Failed -eq 0 -and
        $Results.Provenance.Failed -eq 0) {
        Write-Host "All verifications passed!" -ForegroundColor Green
    }
    else {
        Write-Host "Some verifications failed!" -ForegroundColor Red
    }
}
# Main entry point. Exit codes mirror verify.sh:
#   0 = all passed, 1 = checksum failure, 2 = signature failure,
#   3 = provenance failure, 4 = configuration/usage error.
try {
    # Refuse to run outside an evidence pack directory.
    if (-not (Test-Path $CosignPub)) {
        Write-Fail "cosign.pub not found - are you in an evidence pack directory?"
        exit 4
    }
    if (-not (Test-Path $ChecksumsDir)) {
        Write-Fail "checksums directory not found"
        exit 4
    }
    # Run verifications
    # NOTE(review): these captured booleans are never read - the exit code is
    # derived from the $Results counters below; confirm and consider removing.
    $checksumsOk = Test-Checksums
    $checksumSigOk = Test-ChecksumsSignature
    $artifactSigOk = Test-ArtifactSignatures
    Test-SbomSignatures # Non-fatal
    $provenanceOk = Test-Provenance
    # Print summary
    Write-Summary
    # Exit with appropriate code (priority: checksums > signatures > provenance)
    if ($Results.Checksums.Failed -gt 0) { exit 1 }
    if ($Results.Signatures.Failed -gt 0) { exit 2 }
    if ($Results.Provenance.Failed -gt 0) { exit 3 }
    exit 0
}
catch {
    # Any unexpected error (ErrorActionPreference=Stop) maps to exit 4.
    Write-Fail $_.Exception.Message
    exit 4
}

View File

@@ -0,0 +1,422 @@
#!/bin/sh
# Stella Ops Release Evidence Pack Verifier
# Verifies release integrity offline using POSIX tools + cosign
#
# Exit codes:
# 0 = All verifications passed
# 1 = Checksum verification failed
# 2 = Signature verification failed
# 3 = Provenance verification failed
# 4 = Configuration/usage error
#
# Usage: ./verify.sh [OPTIONS]
# --skip-rekor Skip Rekor proof verification (default in offline mode)
# --require-rekor Require Rekor proof verification
# --artifact NAME Verify only the specified artifact
# --verbose Show detailed output
# --json Output results as JSON
# --no-color Disable colored output
# --help Show this help message
set -eu
# Configuration
# Resolve the bundle root from the script's own location so the verifier can
# be invoked from any working directory.
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
COSIGN_PUB="${SCRIPT_DIR}/cosign.pub"
CHECKSUMS_DIR="${SCRIPT_DIR}/checksums"
ARTIFACTS_DIR="${SCRIPT_DIR}/artifacts"
PROVENANCE_DIR="${SCRIPT_DIR}/provenance"
ATTESTATIONS_DIR="${SCRIPT_DIR}/attestations"
# Options (set by the argument parser in main)
# NOTE(review): SKIP_REKOR and ATTESTATIONS_DIR are assigned but never read
# anywhere in this script - confirm whether Rekor/attestation verification is
# still planned.
SKIP_REKOR=true
VERBOSE=false
JSON_OUTPUT=false
NO_COLOR=false
SINGLE_ARTIFACT=""
# Results tracking (global counters incremented by the verify_* functions and
# used by print_summary and the final exit code)
CHECKSUMS_PASSED=0
CHECKSUMS_FAILED=0
SIGNATURES_PASSED=0
SIGNATURES_FAILED=0
PROVENANCE_PASSED=0
PROVENANCE_FAILED=0
# Colors (populated by setup_colors; empty strings when disabled)
RED=""
GREEN=""
YELLOW=""
RESET=""
setup_colors() {
    # Enable ANSI colors only when --no-color was not given AND stdout is a
    # terminal; otherwise the color variables stay empty strings.
    if [ "$NO_COLOR" != false ]; then
        return 0
    fi
    if [ -t 1 ]; then
        RED='\033[0;31m'
        GREEN='\033[0;32m'
        YELLOW='\033[0;33m'
        RESET='\033[0m'
    fi
}
log_pass() {
    # Green [PASS] line on stdout; silent in --json mode.
    if [ "$JSON_OUTPUT" != false ]; then
        return 0
    fi
    printf "${GREEN}[PASS]${RESET} %s\n" "$1"
}
log_fail() {
    # Red [FAIL] line on stderr; silent in --json mode.
    if [ "$JSON_OUTPUT" != false ]; then
        return 0
    fi
    printf "${RED}[FAIL]${RESET} %s\n" "$1" >&2
}
log_warn() {
    # Yellow [WARN] line on stdout; silent in --json mode.
    if [ "$JSON_OUTPUT" != false ]; then
        return 0
    fi
    printf "${YELLOW}[WARN]${RESET} %s\n" "$1"
}
log_info() {
    # Plain info line; printed only with --verbose and never in --json mode.
    if [ "$JSON_OUTPUT" != false ]; then
        return 0
    fi
    if [ "$VERBOSE" = true ]; then
        printf "[INFO] %s\n" "$1"
    fi
}
usage() {
    # Print the header comment block (usage + options) and exit successfully.
    # Fix: the range was '2,18p', which stopped one line short and dropped the
    # "--help" option from the printed help; the header text spans file lines
    # 2-19. Bare '#' separator lines are blanked instead of printed as '#'.
    sed -n '2,19p' "$0" | sed -e 's/^# //' -e 's/^#$//'
    exit 0
}
check_cosign() {
    # Succeeds (0) when the cosign binary is on PATH. On failure it warns and
    # returns 1 so callers can skip signature verification gracefully.
    if ! command -v cosign >/dev/null 2>&1; then
        log_warn "cosign not found - signature verification will be skipped"
        log_warn "Install cosign: https://docs.sigstore.dev/cosign/installation/"
        return 1
    fi
    return 0
}
verify_checksums() {
    # Recomputes the SHA-256 of every file listed in checksums/SHA256SUMS and
    # compares it against the recorded digest (honoring --artifact). Updates
    # the CHECKSUMS_* counters; returns non-zero if any entry failed.
    log_info "Verifying artifact checksums..."
    if [ ! -f "${CHECKSUMS_DIR}/SHA256SUMS" ]; then
        log_fail "SHA256SUMS file not found"
        return 1
    fi
    # Entries are relative to the bundle root.
    cd "${SCRIPT_DIR}"
    local failed=0
    while IFS= read -r line; do
        # Skip empty lines
        [ -z "$line" ] && continue
        # Fix: parse "<hash>  <path>" with parameter expansion instead of
        # awk '{print $2}', which truncated file names containing spaces.
        hash="${line%% *}"
        file="${line#* }"
        file="${file# }"
        # If single artifact specified, skip others
        if [ -n "$SINGLE_ARTIFACT" ] && [ "$file" != "$SINGLE_ARTIFACT" ]; then
            continue
        fi
        if [ ! -f "$file" ]; then
            log_fail "File not found: $file"
            CHECKSUMS_FAILED=$((CHECKSUMS_FAILED + 1))
            failed=1
            continue
        fi
        # Compute hash
        computed_hash=$(sha256sum "$file" | awk '{print $1}')
        if [ "$computed_hash" = "$hash" ]; then
            log_pass "Checksum verified: $file"
            CHECKSUMS_PASSED=$((CHECKSUMS_PASSED + 1))
        else
            log_fail "Checksum mismatch: $file"
            log_info " Expected: $hash"
            log_info " Got: $computed_hash"
            CHECKSUMS_FAILED=$((CHECKSUMS_FAILED + 1))
            failed=1
        fi
    done < "${CHECKSUMS_DIR}/SHA256SUMS"
    return $failed
}
verify_checksums_signature() {
    # Verifies the cosign signature over checksums/SHA256SUMS. Missing cosign
    # or a missing .sig file is a soft skip (returns 0); only a failed cosign
    # verification counts as failure.
    if ! check_cosign; then
        log_warn "Skipping checksums signature verification (cosign not available)"
        return 0
    fi
    log_info "Verifying SHA256SUMS signature..."
    if [ ! -f "${CHECKSUMS_DIR}/SHA256SUMS.sig" ]; then
        log_warn "SHA256SUMS.sig not found - skipping signature verification"
        return 0
    fi
    # cosign's own stderr is suppressed; success is judged by its exit status.
    if cosign verify-blob \
        --key "$COSIGN_PUB" \
        --signature "${CHECKSUMS_DIR}/SHA256SUMS.sig" \
        "${CHECKSUMS_DIR}/SHA256SUMS" 2>/dev/null; then
        log_pass "SHA256SUMS signature verified"
        SIGNATURES_PASSED=$((SIGNATURES_PASSED + 1))
        return 0
    else
        log_fail "SHA256SUMS signature verification failed"
        SIGNATURES_FAILED=$((SIGNATURES_FAILED + 1))
        return 1
    fi
}
verify_artifact_signatures() {
    # Verifies the cosign signature of every file in artifacts/ (or only the
    # one named by --artifact). .sig/.cert companions are skipped; a missing
    # signature is a warning, not a failure. Updates SIGNATURES_* counters.
    if ! check_cosign; then
        log_warn "Skipping artifact signature verification (cosign not available)"
        return 0
    fi
    log_info "Verifying artifact signatures..."
    local failed=0
    for artifact in "${ARTIFACTS_DIR}"/*; do
        # Glob may expand to the literal pattern when the dir is empty.
        [ -f "$artifact" ] || continue
        # Skip signature files
        case "$artifact" in
            *.sig|*.cert) continue ;;
        esac
        artifact_name=$(basename "$artifact")
        # If single artifact specified, skip others (bundle-relative path)
        if [ -n "$SINGLE_ARTIFACT" ] && [ "artifacts/$artifact_name" != "$SINGLE_ARTIFACT" ]; then
            continue
        fi
        sig_file="${artifact}.sig"
        if [ ! -f "$sig_file" ]; then
            log_warn "No signature for: $artifact_name"
            continue
        fi
        if cosign verify-blob \
            --key "$COSIGN_PUB" \
            --signature "$sig_file" \
            "$artifact" 2>/dev/null; then
            log_pass "Signature verified: $artifact_name"
            SIGNATURES_PASSED=$((SIGNATURES_PASSED + 1))
        else
            log_fail "Signature verification failed: $artifact_name"
            SIGNATURES_FAILED=$((SIGNATURES_FAILED + 1))
            failed=1
        fi
    done
    return $failed
}
verify_provenance() {
    # Verifies the cosign signature of every *.intoto.jsonl statement in
    # provenance/. An absent or empty directory is a soft skip. Updates the
    # PROVENANCE_* counters; returns non-zero if any verification failed.
    if ! check_cosign; then
        log_warn "Skipping provenance verification (cosign not available)"
        return 0
    fi
    log_info "Verifying provenance statements..."
    if [ ! -d "$PROVENANCE_DIR" ] || [ -z "$(ls -A "$PROVENANCE_DIR" 2>/dev/null)" ]; then
        log_warn "No provenance statements found"
        return 0
    fi
    local failed=0
    for prov in "${PROVENANCE_DIR}"/*.intoto.jsonl; do
        # Glob may expand to the literal pattern when nothing matches.
        [ -f "$prov" ] || continue
        prov_name=$(basename "$prov")
        sig_file="${prov}.sig"
        if [ ! -f "$sig_file" ]; then
            log_warn "No signature for provenance: $prov_name"
            continue
        fi
        if cosign verify-blob \
            --key "$COSIGN_PUB" \
            --signature "$sig_file" \
            "$prov" 2>/dev/null; then
            log_pass "Provenance verified: $prov_name"
            PROVENANCE_PASSED=$((PROVENANCE_PASSED + 1))
        else
            log_fail "Provenance verification failed: $prov_name"
            PROVENANCE_FAILED=$((PROVENANCE_FAILED + 1))
            failed=1
        fi
    done
    return $failed
}
verify_sbom_signatures() {
    # Best-effort SBOM signature verification (*.cdx.json in sbom/). Failures
    # are tallied into the SIGNATURES_* counters but the caller invokes this
    # with "|| true" so they never abort the run by themselves.
    if ! check_cosign; then
        return 0
    fi
    log_info "Verifying SBOM signatures..."
    local sbom_dir="${SCRIPT_DIR}/sbom"
    if [ ! -d "$sbom_dir" ] || [ -z "$(ls -A "$sbom_dir" 2>/dev/null)" ]; then
        log_warn "No SBOMs found"
        return 0
    fi
    for sbom in "${sbom_dir}"/*.cdx.json; do
        # Glob may expand to the literal pattern when nothing matches.
        [ -f "$sbom" ] || continue
        sbom_name=$(basename "$sbom")
        sig_file="${sbom}.sig"
        if [ ! -f "$sig_file" ]; then
            log_warn "No signature for SBOM: $sbom_name"
            continue
        fi
        if cosign verify-blob \
            --key "$COSIGN_PUB" \
            --signature "$sig_file" \
            "$sbom" 2>/dev/null; then
            log_pass "SBOM signature verified: $sbom_name"
            SIGNATURES_PASSED=$((SIGNATURES_PASSED + 1))
        else
            log_fail "SBOM signature verification failed: $sbom_name"
            SIGNATURES_FAILED=$((SIGNATURES_FAILED + 1))
        fi
    done
}
output_json_results() {
    # Emits the machine-readable result document consumed by --json callers.
    # Overall status is "fail" when any category recorded a failure.
    status="pass"
    if [ $CHECKSUMS_FAILED -gt 0 ] || [ $SIGNATURES_FAILED -gt 0 ] || [ $PROVENANCE_FAILED -gt 0 ]; then
        status="fail"
    fi
    printf '{\n'
    printf '  "status": "%s",\n' "$status"
    printf '  "checksums": {\n    "passed": %d,\n    "failed": %d\n  },\n' "$CHECKSUMS_PASSED" "$CHECKSUMS_FAILED"
    printf '  "signatures": {\n    "passed": %d,\n    "failed": %d\n  },\n' "$SIGNATURES_PASSED" "$SIGNATURES_FAILED"
    printf '  "provenance": {\n    "passed": %d,\n    "failed": %d\n  }\n' "$PROVENANCE_PASSED" "$PROVENANCE_FAILED"
    printf '}\n'
}
print_summary() {
    # Renders the final result: JSON in --json mode, otherwise a colored
    # console summary of the global counters.
    if [ "$JSON_OUTPUT" = true ]; then
        output_json_results
        return
    fi
    echo ""
    echo "========================================"
    echo " VERIFICATION SUMMARY"
    echo "========================================"
    printf "Checksums: ${GREEN}%d passed${RESET}, ${RED}%d failed${RESET}\n" "$CHECKSUMS_PASSED" "$CHECKSUMS_FAILED"
    printf "Signatures: ${GREEN}%d passed${RESET}, ${RED}%d failed${RESET}\n" "$SIGNATURES_PASSED" "$SIGNATURES_FAILED"
    printf "Provenance: ${GREEN}%d passed${RESET}, ${RED}%d failed${RESET}\n" "$PROVENANCE_PASSED" "$PROVENANCE_FAILED"
    echo "========================================"
    if [ $CHECKSUMS_FAILED -eq 0 ] && [ $SIGNATURES_FAILED -eq 0 ] && [ $PROVENANCE_FAILED -eq 0 ]; then
        printf "${GREEN}All verifications passed!${RESET}\n"
    else
        printf "${RED}Some verifications failed!${RESET}\n"
    fi
}
main() {
    # Parse arguments (documented in the header comment / --help)
    while [ $# -gt 0 ]; do
        case "$1" in
            --skip-rekor)
                SKIP_REKOR=true
                shift
                ;;
            --require-rekor)
                SKIP_REKOR=false
                shift
                ;;
            --artifact)
                # NOTE(review): no guard that $2 exists; "--artifact" with no
                # value aborts under set -u - confirm that's acceptable.
                SINGLE_ARTIFACT="$2"
                shift 2
                ;;
            --verbose)
                VERBOSE=true
                shift
                ;;
            --json)
                JSON_OUTPUT=true
                shift
                ;;
            --no-color)
                NO_COLOR=true
                shift
                ;;
            --help|-h)
                usage
                ;;
            *)
                echo "Unknown option: $1" >&2
                exit 4
                ;;
        esac
    done
    setup_colors
    # Verify we're in an evidence pack directory
    if [ ! -f "$COSIGN_PUB" ]; then
        log_fail "cosign.pub not found - are you in an evidence pack directory?"
        exit 4
    fi
    if [ ! -d "$CHECKSUMS_DIR" ]; then
        log_fail "checksums directory not found"
        exit 4
    fi
    local exit_code=0
    # Run all verifications; "|| exit_code=N" keeps set -e from aborting on
    # the first failing check so every category still runs and gets reported.
    # NOTE(review): exit_code itself is never read - the final status is
    # recomputed from the global counters below; confirm and simplify.
    verify_checksums || exit_code=1
    verify_checksums_signature || exit_code=2
    verify_artifact_signatures || exit_code=2
    verify_sbom_signatures || true # Non-fatal
    verify_provenance || exit_code=3
    # Print summary
    print_summary
    # Exit codes (see header): 1=checksums, 2=signatures, 3=provenance
    if [ $CHECKSUMS_FAILED -gt 0 ]; then
        exit 1
    elif [ $SIGNATURES_FAILED -gt 0 ]; then
        exit 2
    elif [ $PROVENANCE_FAILED -gt 0 ]; then
        exit 3
    fi
    exit 0
}
main "$@"

View File

@@ -0,0 +1,435 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under the BUSL-1.1 license.
using System.Collections.Immutable;
using System.Globalization;
using System.Text.Json;
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
namespace StellaOps.Attestor.StandardPredicates.Validation;
/// <summary>
/// Validates SLSA v1.0 provenance predicates against the official specification.
/// </summary>
public sealed partial class SlsaSchemaValidator
{
// Injected logger; usage is not visible in this chunk - presumably used by
// the validation helpers further down (TODO confirm).
private readonly ILogger<SlsaSchemaValidator> _logger;
// Effective validation options (mode, minimum level, allowed builder ids);
// never null (constructor falls back to SlsaValidationOptions.Default).
private readonly SlsaValidationOptions _options;
// Regex for digest format: "algorithm:hexstring", e.g. "sha256:ab12..."
[GeneratedRegex(@"^[a-z0-9_-]+:[a-f0-9]+$", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
private static partial Regex DigestFormatRegex();
// Regex for RFC 3339 timestamps, e.g. "2024-01-01T12:00:00Z" or with a
// fractional second and a numeric UTC offset.
[GeneratedRegex(@"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})$", RegexOptions.Compiled)]
private static partial Regex Rfc3339Regex();
/// <summary>
/// Creates a validator.
/// </summary>
/// <param name="logger">Logger for validation diagnostics.</param>
/// <param name="options">
/// Validation options; when null, <see cref="SlsaValidationOptions.Default"/> is used.
/// </param>
public SlsaSchemaValidator(ILogger<SlsaSchemaValidator> logger, SlsaValidationOptions? options = null)
{
    _logger = logger;
    _options = options ?? SlsaValidationOptions.Default;
}
/// <summary>
/// Validates a SLSA v1.0 provenance predicate.
/// </summary>
/// <param name="predicate">The parsed predicate object of an in-toto statement.</param>
/// <returns>
/// A result carrying all accumulated errors and warnings plus metadata
/// (evaluated SLSA level, builder id, build type). The predicate is valid
/// when no errors were recorded.
/// </returns>
public SlsaValidationResult Validate(JsonElement predicate)
{
    var errors = new List<SlsaValidationError>();
    var warnings = new List<SlsaValidationWarning>();
    // 1. Validate buildDefinition (required)
    if (!predicate.TryGetProperty("buildDefinition", out var buildDef))
    {
        errors.Add(new SlsaValidationError(
            "SLSA_MISSING_BUILD_DEFINITION",
            "Required field 'buildDefinition' is missing",
            "buildDefinition"));
    }
    else
    {
        ValidateBuildDefinition(buildDef, errors, warnings);
    }
    // 2. Validate runDetails (required)
    if (!predicate.TryGetProperty("runDetails", out var runDetails))
    {
        errors.Add(new SlsaValidationError(
            "SLSA_MISSING_RUN_DETAILS",
            "Required field 'runDetails' is missing",
            "runDetails"));
    }
    else
    {
        ValidateRunDetails(runDetails, errors, warnings);
    }
    // 3. Evaluate SLSA level - computed even when structural errors exist so
    //    the metadata is always populated.
    var slsaLevel = EvaluateSlsaLevel(predicate);
    // 4. Check minimum SLSA level (policy gate from options, not schema)
    if (_options.MinimumSlsaLevel.HasValue && slsaLevel < _options.MinimumSlsaLevel.Value)
    {
        errors.Add(new SlsaValidationError(
            "SLSA_LEVEL_TOO_LOW",
            $"SLSA level {slsaLevel} is below minimum required level {_options.MinimumSlsaLevel.Value}",
            ""));
    }
    // 5. Check allowed builder IDs - an empty allow-list disables the check.
    //    A missing/empty builder id is not flagged here; the structural
    //    runDetails validation is responsible for that.
    if (_options.AllowedBuilderIds.Count > 0)
    {
        var builderId = GetBuilderId(predicate);
        if (!string.IsNullOrEmpty(builderId) && !_options.AllowedBuilderIds.Contains(builderId))
        {
            errors.Add(new SlsaValidationError(
                "SLSA_BUILDER_NOT_ALLOWED",
                $"Builder ID '{builderId}' is not in the allowed list",
                "runDetails.builder.id"));
        }
    }
    var metadata = new SlsaPredicateMetadata
    {
        Format = "slsa-provenance",
        Version = "1.0",
        SlsaLevel = slsaLevel,
        BuilderId = GetBuilderId(predicate),
        BuildType = GetBuildType(predicate)
    };
    return new SlsaValidationResult(
        IsValid: errors.Count == 0,
        Errors: errors.ToImmutableArray(),
        Warnings: warnings.ToImmutableArray(),
        Metadata: metadata);
}
private void ValidateBuildDefinition(JsonElement buildDef, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
// buildType (required)
if (!buildDef.TryGetProperty("buildType", out var buildType) ||
buildType.ValueKind != JsonValueKind.String ||
string.IsNullOrWhiteSpace(buildType.GetString()))
{
errors.Add(new SlsaValidationError(
"SLSA_MISSING_BUILD_TYPE",
"Required field 'buildDefinition.buildType' is missing or empty",
"buildDefinition.buildType"));
}
else if (_options.Mode == SlsaValidationMode.Strict)
{
// In strict mode, buildType should be a valid URI
var buildTypeStr = buildType.GetString()!;
if (!Uri.TryCreate(buildTypeStr, UriKind.Absolute, out _))
{
warnings.Add(new SlsaValidationWarning(
"SLSA_BUILD_TYPE_NOT_URI",
$"buildType '{buildTypeStr}' is not a valid URI (recommended for SLSA compliance)",
"buildDefinition.buildType"));
}
}
// externalParameters (required, must be object)
if (!buildDef.TryGetProperty("externalParameters", out var extParams) ||
extParams.ValueKind != JsonValueKind.Object)
{
errors.Add(new SlsaValidationError(
"SLSA_MISSING_EXTERNAL_PARAMETERS",
"Required field 'buildDefinition.externalParameters' is missing or not an object",
"buildDefinition.externalParameters"));
}
// resolvedDependencies (optional but recommended)
if (buildDef.TryGetProperty("resolvedDependencies", out var deps))
{
if (deps.ValueKind != JsonValueKind.Array)
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_RESOLVED_DEPENDENCIES",
"'buildDefinition.resolvedDependencies' must be an array",
"buildDefinition.resolvedDependencies"));
}
else
{
ValidateResourceDescriptors(deps, "buildDefinition.resolvedDependencies", errors, warnings);
}
}
}
private void ValidateRunDetails(JsonElement runDetails, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
// builder (required)
if (!runDetails.TryGetProperty("builder", out var builder) ||
builder.ValueKind != JsonValueKind.Object)
{
errors.Add(new SlsaValidationError(
"SLSA_MISSING_BUILDER",
"Required field 'runDetails.builder' is missing or not an object",
"runDetails.builder"));
}
else
{
ValidateBuilder(builder, errors, warnings);
}
// metadata (optional but recommended)
if (runDetails.TryGetProperty("metadata", out var metadata))
{
ValidateMetadata(metadata, errors, warnings);
}
// byproducts (optional)
if (runDetails.TryGetProperty("byproducts", out var byproducts))
{
if (byproducts.ValueKind != JsonValueKind.Array)
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_BYPRODUCTS",
"'runDetails.byproducts' must be an array",
"runDetails.byproducts"));
}
else
{
ValidateResourceDescriptors(byproducts, "runDetails.byproducts", errors, warnings);
}
}
}
private void ValidateBuilder(JsonElement builder, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
// id (required)
if (!builder.TryGetProperty("id", out var id) ||
id.ValueKind != JsonValueKind.String ||
string.IsNullOrWhiteSpace(id.GetString()))
{
errors.Add(new SlsaValidationError(
"SLSA_MISSING_BUILDER_ID",
"Required field 'runDetails.builder.id' is missing or empty",
"runDetails.builder.id"));
}
else if (_options.Mode == SlsaValidationMode.Strict && _options.RequireValidBuilderIdUri)
{
var idStr = id.GetString()!;
if (!Uri.TryCreate(idStr, UriKind.Absolute, out _))
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_BUILDER_ID_FORMAT",
$"builder.id must be a valid URI in strict mode, got: '{idStr}'",
"runDetails.builder.id"));
}
}
}
private void ValidateMetadata(JsonElement metadata, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
// invocationId (optional but recommended)
// startedOn (optional, RFC 3339)
if (metadata.TryGetProperty("startedOn", out var startedOn))
{
ValidateTimestamp(startedOn, "runDetails.metadata.startedOn", errors, warnings);
}
// finishedOn (optional, RFC 3339)
if (metadata.TryGetProperty("finishedOn", out var finishedOn))
{
ValidateTimestamp(finishedOn, "runDetails.metadata.finishedOn", errors, warnings);
}
}
private void ValidateTimestamp(JsonElement timestamp, string path, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
if (timestamp.ValueKind != JsonValueKind.String)
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_TIMESTAMP_TYPE",
$"Timestamp at '{path}' must be a string",
path));
return;
}
var value = timestamp.GetString()!;
if (_options.Mode == SlsaValidationMode.Strict && _options.RequireTimestampFormat)
{
if (!Rfc3339Regex().IsMatch(value))
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_TIMESTAMP_FORMAT",
$"Timestamp at '{path}' is not RFC 3339 format: '{value}'",
path));
}
}
else
{
// Standard mode: just warn if not parseable
if (!DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out _))
{
warnings.Add(new SlsaValidationWarning(
"SLSA_TIMESTAMP_PARSE_WARNING",
$"Timestamp at '{path}' may not be valid: '{value}'",
path));
}
}
}
private void ValidateResourceDescriptors(JsonElement descriptors, string basePath, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
var index = 0;
foreach (var descriptor in descriptors.EnumerateArray())
{
var path = $"{basePath}[{index}]";
// At least one of uri, name, or digest should be present
var hasUri = descriptor.TryGetProperty("uri", out _);
var hasName = descriptor.TryGetProperty("name", out _);
var hasDigest = descriptor.TryGetProperty("digest", out var digest);
if (!hasUri && !hasName && !hasDigest)
{
warnings.Add(new SlsaValidationWarning(
"SLSA_EMPTY_RESOURCE_DESCRIPTOR",
$"Resource descriptor at '{path}' has no uri, name, or digest",
path));
}
// Validate digest format
if (hasDigest && digest.ValueKind == JsonValueKind.Object)
{
ValidateDigests(digest, $"{path}.digest", errors, warnings);
}
index++;
}
}
private void ValidateDigests(JsonElement digests, string path, List<SlsaValidationError> errors, List<SlsaValidationWarning> warnings)
{
foreach (var prop in digests.EnumerateObject())
{
var algorithm = prop.Name;
var value = prop.Value.GetString() ?? "";
// Check algorithm is approved
if (_options.Mode == SlsaValidationMode.Strict &&
_options.RequireApprovedDigestAlgorithms &&
!_options.ApprovedDigestAlgorithms.Contains(algorithm.ToLowerInvariant()))
{
errors.Add(new SlsaValidationError(
"SLSA_UNAPPROVED_DIGEST_ALGORITHM",
$"Digest algorithm '{algorithm}' at '{path}' is not in the approved list",
$"{path}.{algorithm}"));
}
// Check value is hex string
if (!IsHexString(value))
{
errors.Add(new SlsaValidationError(
"SLSA_INVALID_DIGEST_VALUE",
$"Digest value at '{path}.{algorithm}' is not a valid hex string",
$"{path}.{algorithm}"));
}
}
}
private static bool IsHexString(string value)
{
if (string.IsNullOrEmpty(value))
return false;
return value.All(c => char.IsAsciiHexDigit(c));
}
private int EvaluateSlsaLevel(JsonElement predicate)
{
// Basic heuristics for SLSA level evaluation
// This is a simplified version - full evaluation would require policy configuration
var level = 1; // Base level if we have any provenance
// Check for builder info
var hasBuilder = predicate.TryGetProperty("runDetails", out var runDetails) &&
runDetails.TryGetProperty("builder", out var builder) &&
builder.TryGetProperty("id", out _);
if (!hasBuilder)
return 0;
// Level 2: Has resolved dependencies with digests
if (predicate.TryGetProperty("buildDefinition", out var buildDef) &&
buildDef.TryGetProperty("resolvedDependencies", out var deps) &&
deps.ValueKind == JsonValueKind.Array &&
deps.GetArrayLength() > 0)
{
var hasDigests = deps.EnumerateArray()
.Any(d => d.TryGetProperty("digest", out _));
if (hasDigests)
level = 2;
}
// Level 3: Would require verification of isolated build, etc.
// This requires external policy configuration
return level;
}
private static string? GetBuilderId(JsonElement predicate)
{
if (predicate.TryGetProperty("runDetails", out var runDetails) &&
runDetails.TryGetProperty("builder", out var builder) &&
builder.TryGetProperty("id", out var id))
{
return id.GetString();
}
return null;
}
private static string? GetBuildType(JsonElement predicate)
{
if (predicate.TryGetProperty("buildDefinition", out var buildDef) &&
buildDef.TryGetProperty("buildType", out var buildType))
{
return buildType.GetString();
}
return null;
}
}
/// <summary>
/// Result of SLSA predicate validation.
/// </summary>
/// <param name="IsValid">True when no errors were recorded; warnings do not affect validity.</param>
/// <param name="Errors">Hard violations of the SLSA v1.0 schema or of configured policy (level/builder checks).</param>
/// <param name="Warnings">Non-fatal findings, e.g. recommended-but-missing or non-URI fields.</param>
/// <param name="Metadata">Summary extracted from the predicate: heuristic level, builder id, build type.</param>
public sealed record SlsaValidationResult(
    bool IsValid,
    ImmutableArray<SlsaValidationError> Errors,
    ImmutableArray<SlsaValidationWarning> Warnings,
    SlsaPredicateMetadata Metadata);
/// <summary>
/// Validation error: a hard failure that makes the predicate invalid.
/// </summary>
/// <param name="Code">Stable machine-readable code, e.g. "SLSA_MISSING_BUILD_DEFINITION".</param>
/// <param name="Message">Human-readable description of the failure.</param>
/// <param name="Path">JSON path of the offending field; empty when the error is not tied to a single field.</param>
public sealed record SlsaValidationError(
    string Code,
    string Message,
    string Path);
/// <summary>
/// Validation warning: a non-fatal finding that does not affect validity.
/// </summary>
/// <param name="Code">Stable machine-readable code, e.g. "SLSA_TIMESTAMP_PARSE_WARNING".</param>
/// <param name="Message">Human-readable description of the finding.</param>
/// <param name="Path">JSON path of the field the warning refers to.</param>
public sealed record SlsaValidationWarning(
    string Code,
    string Message,
    string Path);
/// <summary>
/// Metadata extracted from SLSA predicate.
/// </summary>
public sealed record SlsaPredicateMetadata
{
    /// <summary>Predicate format identifier; the validator always sets "slsa-provenance".</summary>
    public required string Format { get; init; }
    /// <summary>SLSA specification version validated against; the validator always sets "1.0".</summary>
    public required string Version { get; init; }
    /// <summary>Heuristically evaluated SLSA build level (0 when no builder id is present).</summary>
    public int SlsaLevel { get; init; }
    /// <summary>Builder identifier from runDetails.builder.id, or null when absent.</summary>
    public string? BuilderId { get; init; }
    /// <summary>Build type from buildDefinition.buildType, or null when absent.</summary>
    public string? BuildType { get; init; }
}

View File

@@ -0,0 +1,94 @@
// Copyright (c) StellaOps. All rights reserved.
// Licensed under the BUSL-1.1 license.
using System.Collections.Immutable;
namespace StellaOps.Attestor.StandardPredicates.Validation;
/// <summary>
/// Options for SLSA provenance validation.
/// </summary>
/// <summary>
/// Options for SLSA provenance validation.
/// </summary>
public sealed record SlsaValidationOptions
{
    /// <summary>
    /// Default validation options (standard mode, no policy checks).
    /// </summary>
    public static SlsaValidationOptions Default { get; } = new();

    /// <summary>
    /// Strict validation options with all checks enabled and a minimum SLSA level of 2.
    /// </summary>
    public static SlsaValidationOptions Strict { get; } = new()
    {
        Mode = SlsaValidationMode.Strict,
        RequireApprovedDigestAlgorithms = true,
        RequireValidBuilderIdUri = true,
        RequireTimestampFormat = true,
        MinimumSlsaLevel = 2
    };

    /// <summary>
    /// Validation mode: Standard (schema only) or Strict (schema + additional checks).
    /// </summary>
    public SlsaValidationMode Mode { get; init; } = SlsaValidationMode.Standard;

    /// <summary>
    /// Minimum SLSA level to accept. Rejects predicates below this level.
    /// Null disables the check.
    /// </summary>
    public int? MinimumSlsaLevel { get; init; }

    /// <summary>
    /// Required builder IDs. Rejects predicates from unknown builders.
    /// Empty set means all builders are allowed.
    /// </summary>
    public ImmutableHashSet<string> AllowedBuilderIds { get; init; } = [];

    /// <summary>
    /// Whether to require all digest algorithms be from the approved set.
    /// </summary>
    public bool RequireApprovedDigestAlgorithms { get; init; }

    /// <summary>
    /// Whether to require builder.id to be a valid URI.
    /// </summary>
    public bool RequireValidBuilderIdUri { get; init; }

    /// <summary>
    /// Whether to require timestamps to be RFC 3339 format.
    /// </summary>
    public bool RequireTimestampFormat { get; init; }

    /// <summary>
    /// Approved digest algorithms.
    /// </summary>
    /// <remarks>
    /// Fix: the set now compares case-insensitively. Consumers look algorithms up
    /// after lower-casing, which could never match the mixed-case "gitCommit" entry
    /// under the previous case-sensitive default comparer — approved gitCommit
    /// digests were therefore always rejected in strict mode.
    /// </remarks>
    public ImmutableHashSet<string> ApprovedDigestAlgorithms { get; init; } =
        ImmutableHashSet.Create(
            StringComparer.OrdinalIgnoreCase,
            "sha256",
            "sha384",
            "sha512",
            "sha3-256",
            "sha3-384",
            "sha3-512",
            "gitCommit"); // Special case for git refs
}

/// <summary>
/// SLSA validation mode.
/// </summary>
public enum SlsaValidationMode
{
    /// <summary>
    /// Validates presence of required fields only.
    /// </summary>
    Standard,

    /// <summary>
    /// Validates against full SLSA v1.0 requirements:
    /// - builder.id must be valid URI
    /// - All digests must use approved algorithms
    /// - Timestamps must be RFC 3339
    /// - Resource descriptors must have required fields
    /// </summary>
    Strict
}