feat: Add DigestUpsertRequest and LockEntity models

- Introduced DigestUpsertRequest to describe a digest upsert operation, carrying ChannelId, Recipient, DigestKey, Events, and CollectUntil.
- Created LockEntity to represent a lightweight distributed-lock entry with Id, TenantId, Resource, Owner, ExpiresAt, and CreatedAt; both models are sketched below.
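
A minimal sketch of the two models, assuming the listed properties map directly onto record/entity members (the property types are assumptions, not taken from the diff):

// Illustrative only - property types are assumed.
public sealed record DigestUpsertRequest(
    string ChannelId,
    string Recipient,
    string DigestKey,
    IReadOnlyList<string> Events,
    DateTimeOffset CollectUntil);

// Illustrative only - property types are assumed.
public sealed class LockEntity
{
    public string Id { get; init; } = string.Empty;
    public string TenantId { get; init; } = string.Empty;
    public string Resource { get; init; } = string.Empty;
    public string Owner { get; init; } = string.Empty;
    public DateTimeOffset ExpiresAt { get; init; }
    public DateTimeOffset CreatedAt { get; init; }
}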

feat: Implement ILockRepository interface and LockRepository class

- Defined the ILockRepository interface with methods for acquiring and releasing locks.
- Implemented LockRepository, which tries to acquire a lock and releases it using SQL upsert operations (the contract is sketched below).
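
A sketch of the locking contract under assumed method names (the commit states only that the interface exposes acquire and release operations backed by a SQL upsert):

// Illustrative only - method names and signatures are assumptions.
public interface ILockRepository
{
    // Attempts to take or refresh the lock on a resource; returns false when another owner holds it.
    Task<bool> TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default);

    // Releases the lock only when the caller is still the current owner.
    Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default);
}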

feat: Add SurfaceManifestPointer record for manifest pointers

- Introduced SurfaceManifestPointer, a minimal pointer to the Surface.FS manifest associated with an image digest (sketched below).
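
A minimal sketch under assumed member names (the commit only states that the pointer ties an image digest to a Surface.FS manifest):

// Illustrative only - member names are assumptions.
public sealed record SurfaceManifestPointer(
    string ImageDigest,     // digest of the image the manifest belongs to
    string ManifestDigest,  // digest of the Surface.FS manifest (assumed)
    string ManifestUri);    // location of the manifest in Surface.FS (assumed)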

feat: Create PolicySimulationInputLock and related validation logic

- Added the PolicySimulationInputLock record to describe policy simulation inputs and their expected digests.
- Implemented validation for those inputs, including digest-drift checks and shadow-mode requirements (illustrated below).
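
An illustrative validation sketch, assuming the lock pairs each input with an expected digest and that simulation must run in shadow mode (names and shapes are assumptions):

// Illustrative only - the real PolicySimulationInputLock and validator may differ.
public sealed record SimulationInputDigest(string Name, string Sha256);

public static class PolicySimulationInputValidationSketch
{
    public static IReadOnlyList<string> Validate(
        IReadOnlyList<SimulationInputDigest> expected,
        IReadOnlyDictionary<string, string> computed,
        bool shadowModeEnabled)
    {
        var errors = new List<string>();
        if (!shadowModeEnabled)
        {
            errors.Add("shadow-mode-required");
        }

        foreach (var input in expected)
        {
            if (!computed.TryGetValue(input.Name, out var actual))
            {
                errors.Add($"digest-missing:{input.Name}");
            }
            else if (!string.Equals(actual, input.Sha256, StringComparison.OrdinalIgnoreCase))
            {
                errors.Add($"digest-drift:{input.Name}");
            }
        }

        return errors;
    }
}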

test: Add unit tests for ReplayVerificationService and ReplayVerifier

- Created ReplayVerificationServiceTests to validate the service's behavior across success and failure scenarios.
- Developed ReplayVerifierTests to confirm the ReplayVerifier logic; an example case is sketched below.
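
An example of the kind of case these tests likely cover, written against the ReplayVerificationRequest and ReplayVerifier added in this commit (xUnit is assumed as the test framework):

using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Validation;
using Xunit;

public class ReplayVerifierSketchTests
{
    [Fact]
    public void Verify_ReportsManifestHashDrift_WhenComputedManifestDiffers()
    {
        var verifier = new ReplayVerifier();
        var request = new ReplayVerificationRequest(
            ExpectedManifestSha256: "aaa",
            ExpectedBundleSha256: "bbb",
            ComputedManifestSha256: "ccc",  // deliberately different from the expected hash
            ComputedBundleSha256: "bbb",
            ManifestCreatedAt: DateTimeOffset.UtcNow,
            StalenessWindowHours: 24,
            BundlePolicyHash: null,
            SealedPolicyHash: null,
            Depth: ReplayDepth.FullRecompute);

        var result = verifier.Verify(request, DateTimeOffset.UtcNow);

        Assert.False(result.IsValid);
        Assert.Equal("manifest-hash-drift", result.Reason);
    }
}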

test: Implement PolicySimulationInputLockValidatorTests

- Added tests for PolicySimulationInputLockValidator to verify the validation logic against expected inputs and conditions.

chore: Add cosign key example and signing scripts

- Included a placeholder cosign key example for development use.
- Added a script for signing Signals artifacts with cosign, supporting both cosign v2 and v3.

chore: Create script for uploading evidence to the evidence locker

- Added a script that uploads evidence to the evidence locker after verifying that the required environment variables are set.

View File

@@ -23,6 +23,8 @@ public static class AirGapControllerServiceCollectionExtensions
services.AddSingleton<AirGapStateService>();
services.AddSingleton<TufMetadataValidator>();
services.AddSingleton<RootRotationPolicy>();
services.AddSingleton<ReplayVerifier>();
services.AddSingleton<ReplayVerificationService>();
services.AddSingleton<IAirGapStateStore>(sp =>
{

View File

@@ -11,6 +11,7 @@ internal static class AirGapEndpoints
{
private const string StatusScope = "airgap:status:read";
private const string SealScope = "airgap:seal";
private const string VerifyScope = "airgap:verify";
public static RouteGroupBuilder MapAirGapEndpoints(this IEndpointRouteBuilder app)
{
@@ -29,6 +30,10 @@ internal static class AirGapEndpoints
.RequireScope(SealScope)
.WithName("AirGapUnseal");
group.MapPost("/verify", HandleVerify)
.RequireScope(VerifyScope)
.WithName("AirGapVerify");
return group;
}
@@ -87,6 +92,24 @@ internal static class AirGapEndpoints
return Results.Ok(AirGapStatusResponse.FromStatus(status));
}
private static async Task<IResult> HandleVerify(
VerifyRequest request,
ReplayVerificationService verifier,
TimeProvider timeProvider,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenant(httpContext);
var now = timeProvider.GetUtcNow();
var result = await verifier.VerifyAsync(tenantId, request, now, cancellationToken);
if (!result.IsValid)
{
return Results.BadRequest(new VerifyResponse(false, result.Reason));
}
return Results.Ok(new VerifyResponse(true, result.Reason));
}
private static string ResolveTenant(HttpContext httpContext)
{
if (httpContext.Request.Headers.TryGetValue("x-tenant-id", out var tenantHeader) && !string.IsNullOrWhiteSpace(tenantHeader))

View File

@@ -0,0 +1,23 @@
using StellaOps.AirGap.Importer.Contracts;
namespace StellaOps.AirGap.Controller.Endpoints.Contracts;
public sealed record VerifyRequest
{
public ReplayDepth Depth { get; init; } = ReplayDepth.FullRecompute;
public string ManifestSha256 { get; init; } = string.Empty;
public string BundleSha256 { get; init; } = string.Empty;
public string? ComputedManifestSha256 { get; init; }
= null;
public string? ComputedBundleSha256 { get; init; }
= null;
public DateTimeOffset ManifestCreatedAt { get; init; }
= DateTimeOffset.MinValue;
public int StalenessWindowHours { get; init; } = 0;
public string? BundlePolicyHash { get; init; }
= null;
public string? SealedPolicyHash { get; init; }
= null;
}
public sealed record VerifyResponse(bool Valid, string Reason);

View File

@@ -0,0 +1,39 @@
using StellaOps.AirGap.Controller.Endpoints.Contracts;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Validation;
namespace StellaOps.AirGap.Controller.Services;
public sealed class ReplayVerificationService
{
private readonly AirGapStateService _stateService;
private readonly ReplayVerifier _verifier;
public ReplayVerificationService(AirGapStateService stateService, ReplayVerifier verifier)
{
_stateService = stateService;
_verifier = verifier;
}
public async Task<ReplayVerificationResult> VerifyAsync(
string tenantId,
VerifyRequest request,
DateTimeOffset nowUtc,
CancellationToken cancellationToken = default)
{
var status = await _stateService.GetStatusAsync(tenantId, nowUtc, cancellationToken);
var replayRequest = new ReplayVerificationRequest(
request.ManifestSha256,
request.BundleSha256,
request.ComputedManifestSha256 ?? request.ManifestSha256,
request.ComputedBundleSha256 ?? request.BundleSha256,
request.ManifestCreatedAt,
request.StalenessWindowHours,
request.BundlePolicyHash,
request.SealedPolicyHash ?? status.State.PolicyHash,
request.Depth);
return _verifier.Verify(replayRequest, nowUtc);
}
}

View File

@@ -0,0 +1,14 @@
using System.Text.Json.Serialization;
namespace StellaOps.AirGap.Importer.Contracts;
/// <summary>
/// Replay enforcement depth for offline kit verification.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ReplayDepth
{
HashOnly,
FullRecompute,
PolicyFreeze
}

View File

@@ -0,0 +1,21 @@
namespace StellaOps.AirGap.Importer.Contracts;
/// <summary>
/// Inputs required to enforce replay depth for an offline kit.
/// </summary>
public sealed record ReplayVerificationRequest(
string ExpectedManifestSha256,
string ExpectedBundleSha256,
string ComputedManifestSha256,
string ComputedBundleSha256,
DateTimeOffset ManifestCreatedAt,
int StalenessWindowHours,
string? BundlePolicyHash,
string? SealedPolicyHash,
ReplayDepth Depth);
public sealed record ReplayVerificationResult(bool IsValid, string Reason)
{
public static ReplayVerificationResult Success(string reason = "ok") => new(true, reason);
public static ReplayVerificationResult Failure(string reason) => new(false, reason);
}

View File

@@ -0,0 +1,60 @@
using StellaOps.AirGap.Importer.Contracts;
namespace StellaOps.AirGap.Importer.Validation;
/// <summary>
/// Enforces replay depth semantics for offline kit validation.
/// </summary>
public sealed class ReplayVerifier
{
public ReplayVerificationResult Verify(ReplayVerificationRequest request, DateTimeOffset nowUtc)
{
if (string.IsNullOrWhiteSpace(request.ExpectedManifestSha256) ||
string.IsNullOrWhiteSpace(request.ExpectedBundleSha256) ||
string.IsNullOrWhiteSpace(request.ComputedManifestSha256) ||
string.IsNullOrWhiteSpace(request.ComputedBundleSha256))
{
return ReplayVerificationResult.Failure("hash-missing");
}
if (!string.Equals(request.ExpectedManifestSha256, request.ComputedManifestSha256, StringComparison.OrdinalIgnoreCase))
{
return ReplayVerificationResult.Failure("manifest-hash-drift");
}
if (!string.Equals(request.ExpectedBundleSha256, request.ComputedBundleSha256, StringComparison.OrdinalIgnoreCase))
{
return ReplayVerificationResult.Failure("bundle-hash-drift");
}
if (request.StalenessWindowHours >= 0)
{
var age = nowUtc - request.ManifestCreatedAt;
if (age > TimeSpan.FromHours(request.StalenessWindowHours))
{
return ReplayVerificationResult.Failure("manifest-stale");
}
}
if (request.Depth == ReplayDepth.PolicyFreeze)
{
if (string.IsNullOrWhiteSpace(request.SealedPolicyHash) || string.IsNullOrWhiteSpace(request.BundlePolicyHash))
{
return ReplayVerificationResult.Failure("policy-hash-missing");
}
if (!string.Equals(request.BundlePolicyHash, request.SealedPolicyHash, StringComparison.OrdinalIgnoreCase))
{
return ReplayVerificationResult.Failure("policy-hash-drift");
}
}
return request.Depth switch
{
ReplayDepth.HashOnly => ReplayVerificationResult.Success("hash-only-passed"),
ReplayDepth.FullRecompute => ReplayVerificationResult.Success("full-recompute-passed"),
ReplayDepth.PolicyFreeze => ReplayVerificationResult.Success("policy-freeze-passed"),
_ => ReplayVerificationResult.Failure("unknown-depth")
};
}
}

View File

@@ -0,0 +1,196 @@
#!/usr/bin/env bash
set -euo pipefail
# Offline verifier covering manifest/bundle digests, staleness, AV report, and replay depth.
# Usage:
# verify-kit.sh --manifest path/to/manifest.json --bundle path/to/bundle.tar.gz \
# [--signature manifest.sig --pubkey manifest.pub.pem] \
# [--av-report reports/av-report.json] [--receipt receipts/ingress.json] \
# [--sealed-policy-hash <sha256>] [--expected-graph-sha <sha256>] \
# [--depth hash-only|full-recompute|policy-freeze] [--now 2025-12-02T00:00:00Z]
usage() {
echo "Usage: $0 --manifest <path> --bundle <path> [--signature <sig> --pubkey <pem>] [--av-report <path>] [--receipt <path>] [--sealed-policy-hash <sha>] [--expected-graph-sha <sha>] [--depth <hash-only|full-recompute|policy-freeze>] [--now <iso8601>]" >&2
exit 64
}
require() {
if ! command -v "$1" >/dev/null; then
echo "$1 is required" >&2
exit 2
fi
}
calc_sha() {
sha256sum "$1" | awk '{print $1}'
}
normalize_depth() {
local raw="${1:-}"
local lowered
lowered=$(echo "$raw" | tr '[:upper:]' '[:lower:]')
lowered=${lowered//_/}
lowered=${lowered// /}
case "$lowered" in
hash-only|hashonly) echo "hash-only" ;;
fullrecompute|full|full-recompute) echo "full-recompute" ;;
policyfreeze|policy-freeze) echo "policy-freeze" ;;
*) echo "$lowered" ;;
esac
}
manifest=""
bundle=""
signature=""
pubkey=""
av_report=""
receipt=""
sealed_policy_hash=${SEALED_POLICY_HASH:-}
expected_graph_sha=""
depth=""
now_ts=""
while [[ $# -gt 0 ]]; do
case "$1" in
--manifest) manifest=${2:-}; shift ;;
--bundle) bundle=${2:-}; shift ;;
--signature) signature=${2:-}; shift ;;
--pubkey) pubkey=${2:-}; shift ;;
--av-report) av_report=${2:-}; shift ;;
--receipt) receipt=${2:-}; shift ;;
--sealed-policy-hash) sealed_policy_hash=${2:-}; shift ;;
--expected-graph-sha) expected_graph_sha=${2:-}; shift ;;
--depth) depth=${2:-}; shift ;;
--now) now_ts=${2:-}; shift ;;
*) usage ;;
esac
shift
done
[[ -z "$manifest" || -z "$bundle" ]] && usage
require jq
require sha256sum
require python3
require realpath
manifest_dir=$(cd "$(dirname "$manifest")" && pwd)
manifest_path=$(realpath "$manifest")
bundle_path=$(realpath "$bundle")
expected_manifest_hash=$(jq -r '.hashes.manifestSha256' "$manifest_path")
expected_bundle_hash=$(jq -r '.hashes.bundleSha256' "$manifest_path")
computed_manifest_hash=$(calc_sha "$manifest_path")
computed_bundle_hash=$(calc_sha "$bundle_path")
if [[ "$expected_manifest_hash" != "$computed_manifest_hash" ]]; then
echo "manifest hash mismatch: expected=$expected_manifest_hash computed=$computed_manifest_hash" >&2
exit 3
fi
if [[ "$expected_bundle_hash" != "$computed_bundle_hash" ]]; then
echo "bundle hash mismatch: expected=$expected_bundle_hash computed=$computed_bundle_hash" >&2
exit 4
fi
if [[ -n "$signature" && -n "$pubkey" ]]; then
require openssl
openssl dgst -sha256 -verify "$pubkey" -signature "$signature" "$manifest_path" >/dev/null
fi
manifest_replay_policy=$(jq -r '.replayPolicy // "full-recompute"' "$manifest_path")
depth=$(normalize_depth "${depth:-$manifest_replay_policy}")
case "$depth" in
hash-only|full-recompute|policy-freeze) ;;
*) echo "invalid depth: $depth" >&2; exit 14 ;;
esac
now_ts=${now_ts:-$(date -u +"%Y-%m-%dT%H:%M:%SZ")}
created_at=$(jq -r '.createdAt' "$manifest_path")
staleness_window=$(jq -r '.stalenessWindowHours' "$manifest_path")
age_hours=$(python3 - "$created_at" "$now_ts" <<'PY'
import sys, datetime
created = sys.argv[1].replace('Z', '+00:00')
now = sys.argv[2].replace('Z', '+00:00')
c = datetime.datetime.fromisoformat(created)
n = datetime.datetime.fromisoformat(now)
print((n - c).total_seconds() / 3600)
PY
)
is_stale=$(python3 - "$age_hours" "$staleness_window" <<'PY'
import sys
age = float(sys.argv[1])
win = int(sys.argv[2])
print("true" if age > win else "false")
PY
)
if [[ "$is_stale" == "true" ]]; then
echo "manifest stale: age_hours=$age_hours window=$staleness_window" >&2
exit 5
fi
# AV/YARA validation
av_status=$(jq -r '.avScan.status // "not_run"' "$manifest_path")
if [[ "$av_status" == "findings" ]]; then
echo "AV scan reported findings" >&2
exit 6
fi
av_report_sha=$(jq -r '.avScan.reportSha256 // ""' "$manifest_path")
if [[ -n "$av_report_sha" ]]; then
[[ -z "$av_report" ]] && { echo "av report required for validation" >&2; exit 7; }
computed_av_sha=$(calc_sha "$av_report")
if [[ "$computed_av_sha" != "$av_report_sha" ]]; then
echo "AV report hash mismatch: expected=$av_report_sha computed=$computed_av_sha" >&2
exit 8
fi
fi
# Chunk integrity (full-recompute/policy-freeze)
if [[ "$depth" != "hash-only" ]]; then
while IFS= read -r line; do
chunk_path=$(echo "$line" | awk '{print $1}')
chunk_sha=$(echo "$line" | awk '{print $2}')
full_path="$manifest_dir/$chunk_path"
if [[ ! -f "$full_path" ]]; then
echo "chunk missing: $full_path" >&2
exit 9
fi
computed_chunk_sha=$(calc_sha "$full_path")
if [[ "$computed_chunk_sha" != "$chunk_sha" ]]; then
echo "chunk hash mismatch for $chunk_path" >&2
exit 10
fi
done < <(jq -r '.chunks[] | "\(.path) \(.sha256)"' "$manifest_path")
fi
if [[ -n "$expected_graph_sha" ]]; then
graph_sha=$(jq -r '.chunks[] | select(.kind=="graph") | .sha256' "$manifest_path" | head -n1)
if [[ "$graph_sha" != "$expected_graph_sha" ]]; then
echo "graph hash mismatch: expected=$expected_graph_sha manifest=$graph_sha" >&2
exit 11
fi
fi
if [[ "$depth" == "policy-freeze" ]]; then
if [[ -z "$sealed_policy_hash" ]]; then
echo "policy-freeze requires --sealed-policy-hash" >&2
exit 12
fi
policy_match=$(jq -r --arg h "$sealed_policy_hash" '[.policies[].sha256] | index($h) != null' "$manifest_path")
if [[ "$policy_match" != "true" ]]; then
echo "policy hash drift: sealed=$sealed_policy_hash not present in manifest" >&2
exit 13
fi
fi
if [[ -n "$receipt" ]]; then
"$(dirname "$0")/verify-receipt.sh" "$receipt" "$manifest_path" "$bundle_path"
fi
echo "Offline kit verification passed (depth=$depth, age_hours=$age_hours)."

View File

@@ -0,0 +1,36 @@
#!/usr/bin/env bash
set -euo pipefail
# Verify an AirGap receipt against manifest/bundle hashes and optional DSSE signature digest.
# Usage: verify-receipt.sh receipt.json manifest.json bundle.tar.gz
receipt=${1:?receipt path required}
manifest=${2:?manifest path required}
bundle=${3:?bundle path required}
if ! command -v jq >/dev/null; then
echo "jq is required" >&2
exit 2
fi
sha256_file() {
sha256sum "$1" | awk '{print $1}'
}
receipt_manifest_hash=$(jq -r '.hashes.manifestSha256' "$receipt")
receipt_bundle_hash=$(jq -r '.hashes.bundleSha256' "$receipt")
calc_manifest_hash=$(sha256_file "$manifest")
calc_bundle_hash=$(sha256_file "$bundle")
if [[ "$receipt_manifest_hash" != "$calc_manifest_hash" ]]; then
echo "manifest hash mismatch: receipt=$receipt_manifest_hash calc=$calc_manifest_hash" >&2
exit 3
fi
if [[ "$receipt_bundle_hash" != "$calc_bundle_hash" ]]; then
echo "bundle hash mismatch: receipt=$receipt_bundle_hash calc=$calc_bundle_hash" >&2
exit 4
fi
echo "Receipt hashes match manifest and bundle."