- Introduced DigestUpsertRequest for handling digest upsert requests with properties like ChannelId, Recipient, DigestKey, Events, and CollectUntil.
- Created LockEntity to represent a lightweight distributed lock entry with properties such as Id, TenantId, Resource, Owner, ExpiresAt, and CreatedAt.

feat: Implement ILockRepository interface and LockRepository class
- Defined ILockRepository interface with methods for acquiring and releasing locks.
- Implemented LockRepository class with methods to try acquiring a lock and releasing it, using SQL for upsert operations.

feat: Add SurfaceManifestPointer record for manifest pointers
- Introduced SurfaceManifestPointer to represent a minimal pointer to a Surface.FS manifest associated with an image digest.

feat: Create PolicySimulationInputLock and related validation logic
- Added PolicySimulationInputLock record to describe policy simulation inputs and expected digests.
- Implemented validation logic for policy simulation inputs, including checks for digest drift and shadow mode requirements.

test: Add unit tests for ReplayVerificationService and ReplayVerifier
- Created ReplayVerificationServiceTests to validate the behavior of the ReplayVerificationService under various scenarios.
- Developed ReplayVerifierTests to ensure the correctness of the ReplayVerifier logic.

test: Implement PolicySimulationInputLockValidatorTests
- Added tests for PolicySimulationInputLockValidator to verify the validation logic against expected inputs and conditions.

chore: Add cosign key example and signing scripts
- Included a placeholder cosign key example for development purposes.
- Added a script for signing Signals artifacts using cosign with support for both v2 and v3.

chore: Create script for uploading evidence to the evidence locker
- Developed a script to upload evidence to the evidence locker, ensuring required environment variables are set.
#!/usr/bin/env bash
set -euo pipefail
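
# Tunables (all optional; override via environment):
#   CREATED                timestamp recorded in manifest/bundle metadata
#   TENANT_SCOPE/ENV_SCOPE tenant and environment scoping for policies and bundle meta
#   CHUNK_SIZE             transport chunk size in bytes (default 5242880 = 5 MiB)
#   CHECKPOINT_FRESHNESS   checkpoint/time-anchor freshness window in seconds (default 86400)
#   PQ_CO_SIGN_REQUIRED    1 to flag PQ dual-signing in the generated policies
#   SIGN_KEY               Ed25519 private key PEM; enables DSSE + TUF signing
#   TIME_ANCHOR_FILE       pre-built time-anchor JSON to bundle instead of the built-in fixture
#   OCI                    1 to also emit an OCI image layout under out/mirror/thin/oci
# Example (key path illustrative):
#   SIGN_KEY=tmp/mirror-signing.pem OCI=1 src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh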
ROOT=$(cd "$(dirname "$0")/../../.." && pwd)
OUT="$ROOT/out/mirror/thin"
STAGE="$OUT/stage-v1"
CREATED=${CREATED:-"2025-11-23T00:00:00Z"}
TENANT_SCOPE=${TENANT_SCOPE:-"tenant-demo"}
ENV_SCOPE=${ENV_SCOPE:-"lab"}
CHUNK_SIZE=${CHUNK_SIZE:-5242880}
CHECKPOINT_FRESHNESS=${CHECKPOINT_FRESHNESS:-86400}
PQ_CO_SIGN_REQUIRED=${PQ_CO_SIGN_REQUIRED:-0}
export STAGE CREATED TENANT_SCOPE ENV_SCOPE CHUNK_SIZE CHECKPOINT_FRESHNESS PQ_CO_SIGN_REQUIRED
export MAKE_HASH SIGN_HASH SIGN_KEY_ID
MAKE_HASH=$(sha256sum "$ROOT/src/Mirror/StellaOps.Mirror.Creator/make-thin-v1.sh" | awk '{print $1}')
SIGN_HASH=$(sha256sum "$ROOT/scripts/mirror/sign_thin_bundle.py" | awk '{print $1}')
SIGN_KEY_ID=${SIGN_KEY_ID:-pending}
if [[ -n "${SIGN_KEY:-}" && -f "${SIGN_KEY%.pem}.pub" ]]; then
  SIGN_KEY_ID=$(sha256sum "${SIGN_KEY%.pem}.pub" | awk '{print $1}')
fi

mkdir -p "$STAGE/layers" "$STAGE/indexes"

# 1) Seed deterministic content
cat > "$STAGE/layers/observations.ndjson" <<'DATA'
{"id":"obs-001","purl":"pkg:nuget/Newtonsoft.Json@13.0.3","advisory":"CVE-2025-0001","severity":"medium","source":"vendor-a","timestamp":"2025-11-01T00:00:00Z"}
{"id":"obs-002","purl":"pkg:npm/lodash@4.17.21","advisory":"CVE-2024-9999","severity":"high","source":"vendor-b","timestamp":"2025-10-15T00:00:00Z"}
DATA

if [[ -n "${TIME_ANCHOR_FILE:-}" && -f "${TIME_ANCHOR_FILE}" ]]; then
  cp "${TIME_ANCHOR_FILE}" "$STAGE/layers/time-anchor.json"
else
  cat > "$STAGE/layers/time-anchor.json" <<'DATA'
{
  "authority": "stellaops-airgap-test",
  "generatedAt": "2025-11-01T00:00:00Z",
  "anchors": [
    {
      "type": "roughtime",
      "version": "1",
      "publicKey": "base64:TEST_KEY_001",
      "signature": "base64:TEST_SIG_001",
      "timestamp": "2025-11-01T00:00:00Z",
      "maxDistanceSeconds": 5
    }
  ]
}
DATA
fi

# Optional: sign time anchor early so bundle meta can record DSSE hash
if [[ -n "${SIGN_KEY:-}" ]]; then
  python - <<'PY'
import base64, json, pathlib, os
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey

stage = pathlib.Path(os.environ['STAGE'])
anchor = stage / 'layers' / 'time-anchor.json'
dsse_path = stage / 'layers' / 'time-anchor.dsse.json'
out_path = stage.parent / 'time-anchor.dsse.json'
key_path = pathlib.Path(os.environ['SIGN_KEY'])

key: Ed25519PrivateKey = serialization.load_pem_private_key(key_path.read_bytes(), password=None)
payload = anchor.read_bytes()
sig = key.sign(payload)
pub_path = key_path.with_suffix('.pub')
pub_key = serialization.load_pem_public_key(pub_path.read_bytes())
pub_raw = pub_path.read_bytes()

def b64url(data: bytes) -> str:
    return base64.urlsafe_b64encode(data).rstrip(b"=").decode()

dsse = {
    "payloadType": "application/vnd.stellaops.time-anchor+json",
    "payload": b64url(payload),
    "signatures": [{"keyid": base64.urlsafe_b64encode(pub_raw).decode(), "sig": b64url(sig)}]
}
dsse_json = json.dumps(dsse, indent=2, sort_keys=True) + "\n"
dsse_path.write_text(dsse_json, encoding='utf-8')
out_path.write_text(dsse_json, encoding='utf-8')
print(f"Signed time-anchor DSSE -> {out_path}")
PY
fi
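
# Note: the DSSE step above needs the Python 'cryptography' package and expects the matching
# public key to sit next to the private key as "${SIGN_KEY%.pem}.pub"; the envelope carries
# payloadType application/vnd.stellaops.time-anchor+json with base64url payload and sig fields.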

cat > "$STAGE/layers/transport-plan.json" <<JSON
{
  "chunkSizeBytes": $CHUNK_SIZE,
  "compression": "gzip",
  "checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "chainOfCustody": [
    {"step": "build", "actor": "make-thin-v1.sh", "evidence": "sha256:$MAKE_HASH", "negativePaths": ["missing-layer", "non-deterministic-tar"]},
    {"step": "sign", "actor": "sign_thin_bundle.py", "expectedEnvelope": "mirror-thin-v1.manifest.dsse.json", "keyid": "$SIGN_KEY_ID", "toolDigest": "sha256:$SIGN_HASH"}
  ],
  "chunking": {"maxChunks": 128, "strategy": "deterministic-size"},
  "ingest": {"expectedLatencySeconds": 120, "retryPolicy": "exponential"}
}
JSON

cat > "$STAGE/layers/rekor-policy.json" <<JSON
{
  "rk1_enforceDsse": true,
  "rk2_payloadMaxBytes": 1048576,
  "rk3_routing": {"public": "hashedrekord", "private": "hashedrekord"},
  "rk4_shardCheckpoint": "per-tenant-per-day",
  "rk5_idempotentKeys": true,
  "rk6_sigstoreBundleIncluded": true,
  "rk7_checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "rk8_pqDualSign": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false),
  "rk9_errorTaxonomy": ["quota", "payload-too-large", "invalid-signature", "stale-checkpoint"],
  "rk10_annotations": ["policy", "graph-edge"]
}
JSON

cat > "$STAGE/layers/mirror-policy.json" <<JSON
{
  "schemaVersion": "mirror-thin-v1",
  "semver": "1.0.0",
  "dsseTufRotationDays": 30,
  "pqDualSign": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false),
  "delta": {"tombstones": true, "baseHashRequired": true},
  "timeAnchorFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "tenantScope": "$TENANT_SCOPE",
  "environment": "$ENV_SCOPE",
  "distributionIntegrity": {"http": "sha256+dsse", "oci": "tuf+dsse", "object": "checksum+length"},
  "chunking": {"sizeBytes": $CHUNK_SIZE, "maxChunks": 128},
  "verifyScript": "scripts/mirror/verify_thin_bundle.py",
  "metrics": {"build": "required", "import": "required", "verify": "required"},
  "changelog": {"current": "mirror-thin-v1", "notes": "Adds offline/rekor policy coverage (MS1-MS10)"}
}
JSON

cat > "$STAGE/layers/offline-kit-policy.json" <<JSON
{
  "okVersion": "1.0.0",
  "keyManifest": {"rotationDays": 90, "pqCosignAllowed": $([[ "$PQ_CO_SIGN_REQUIRED" == "1" ]] && echo true || echo false)},
  "toolHashing": true,
  "topLevelDsse": true,
  "checkpointFreshnessSeconds": $CHECKPOINT_FRESHNESS,
  "deterministicFlags": ["tar --sort=name --owner=0 --group=0 --numeric-owner --mtime=1970-01-01", "gzip -n"],
  "contentHashes": "layers/artifact-hashes.json",
  "timeAnchorPath": "layers/time-anchor.json",
  "transportPlan": "layers/transport-plan.json",
  "tenant": "$TENANT_SCOPE",
  "environment": "$ENV_SCOPE",
  "verifyScript": "scripts/mirror/verify_thin_bundle.py"
}
JSON

cat > "$STAGE/indexes/observations.index" <<'DATA'
obs-001 layers/observations.ndjson:1
obs-002 layers/observations.ndjson:2
DATA
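
# Staged so far: layers/{observations.ndjson,time-anchor.json,transport-plan.json,
# rekor-policy.json,mirror-policy.json,offline-kit-policy.json} plus indexes/observations.index;
# artifact-hashes.json is added next and everything gets digested into the manifest below.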

# Derive deterministic artefact hashes for scan/vex/policy/graph fixtures
python - <<'PY'
import hashlib, json, pathlib, os
root = pathlib.Path(os.environ['STAGE'])

def sha(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return 'sha256:' + h.hexdigest()

targets = {
    'scan': sha(root / 'layers' / 'observations.ndjson'),
    'vex': sha(root / 'layers' / 'observations.ndjson'),
    'policy': sha(root / 'layers' / 'mirror-policy.json'),
    'graph': sha(root / 'layers' / 'rekor-policy.json')
}

artifacts = {
    'scan': {'id': 'scan-fixture-1', 'digest': targets['scan']},
    'vex': {'id': 'vex-fixture-1', 'digest': targets['vex']},
    'policy': {'id': 'policy-fixture-1', 'digest': targets['policy']},
    'graph': {'id': 'graph-fixture-1', 'digest': targets['graph']}
}

(root / 'layers' / 'artifact-hashes.json').write_text(
    json.dumps({'artifacts': artifacts}, indent=2, sort_keys=True) + '\n', encoding='utf-8'
)
PY
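
# artifact-hashes.json shape (keys sorted, 2-space indent):
#   {"artifacts": {"graph": {"digest": "sha256:...", "id": "graph-fixture-1"}, ...}}
# scan/vex currently reuse the observations layer digest; policy/graph hash the mirror
# and rekor policy files respectively.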

# 2) Build manifest from staged files
python - <<'PY'
import json, hashlib, os, pathlib
root = pathlib.Path(os.environ['STAGE'])
created = os.environ['CREATED']

def digest(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return 'sha256:' + h.hexdigest()

def size(path: pathlib.Path) -> int:
    return path.stat().st_size

layers = []
for path in sorted((root / 'layers').glob('*')):
    layers.append({
        'path': f"layers/{path.name}",
        'size': size(path),
        'digest': digest(path)
    })

indexes = []
for path in sorted((root / 'indexes').glob('*')):
    indexes.append({
        'name': path.name,
        'digest': digest(path)
    })

manifest = {
    'version': '1.0.0',
    'created': created,
    'layers': layers,
    'indexes': indexes
}

manifest_path = root / 'manifest.json'
manifest_path.write_text(json.dumps(manifest, indent=2, sort_keys=True) + '\n', encoding='utf-8')
PY
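
# manifest.json lists every staged file, e.g.:
#   {"created": "<CREATED>", "version": "1.0.0",
#    "layers":  [{"digest": "sha256:...", "path": "layers/<name>", "size": <bytes>}, ...],
#    "indexes": [{"digest": "sha256:...", "name": "<name>"}, ...]}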

# 3) Tarball with deterministic metadata
pushd "$OUT" >/dev/null
rm -f mirror-thin-v1.tar.gz mirror-thin-v1.tar.gz.sha256 mirror-thin-v1.manifest.json mirror-thin-v1.manifest.json.sha256
cp "$STAGE/manifest.json" mirror-thin-v1.manifest.json
export GZIP=-n
/usr/bin/tar --sort=name --owner=0 --group=0 --numeric-owner --mtime='1970-01-01' -czf mirror-thin-v1.tar.gz -C "$STAGE" .
popd >/dev/null

# 4) Checksums
pushd "$OUT" >/dev/null
sha256sum mirror-thin-v1.manifest.json > mirror-thin-v1.manifest.json.sha256
sha256sum mirror-thin-v1.tar.gz > mirror-thin-v1.tar.gz.sha256
popd >/dev/null
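
# Both .sha256 files are plain `sha256sum` output, so an air-gapped consumer can re-check
# them with, for example:
#   (cd "$OUT" && sha256sum -c mirror-thin-v1.tar.gz.sha256 mirror-thin-v1.manifest.json.sha256)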

# 5) Optional OCI archive (MIRROR-CRT-57-001)
if [[ "${OCI:-0}" == "1" ]]; then
  OCI_DIR="$OUT/oci"
  BLOBS="$OCI_DIR/blobs/sha256"
  mkdir -p "$BLOBS"
  # layer = thin tarball
  LAYER_SHA=$(sha256sum "$OUT/mirror-thin-v1.tar.gz" | awk '{print $1}')
  cp "$OUT/mirror-thin-v1.tar.gz" "$BLOBS/$LAYER_SHA"
  LAYER_SIZE=$(stat -c%s "$OUT/mirror-thin-v1.tar.gz")
  # config = minimal empty config
  CONFIG_TMP=$(mktemp)
  echo '{"architecture":"amd64","os":"linux"}' > "$CONFIG_TMP"
  CONFIG_SHA=$(sha256sum "$CONFIG_TMP" | awk '{print $1}')
  CONFIG_SIZE=$(stat -c%s "$CONFIG_TMP")
  cp "$CONFIG_TMP" "$BLOBS/$CONFIG_SHA"
  rm "$CONFIG_TMP"
  mkdir -p "$OCI_DIR"
  cat > "$OCI_DIR/oci-layout" <<'JSON'
{
  "imageLayoutVersion": "1.0.0"
}
JSON
  MANIFEST_FILE="$OCI_DIR/manifest.json"
  cat > "$MANIFEST_FILE" <<JSON
{
  "schemaVersion": 2,
  "config": {
    "mediaType": "application/vnd.oci.image.config.v1+json",
    "size": $CONFIG_SIZE,
    "digest": "sha256:$CONFIG_SHA"
  },
  "layers": [
    {
      "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip",
      "size": $LAYER_SIZE,
      "digest": "sha256:$LAYER_SHA",
      "annotations": {"org.stellaops.bundle.type": "mirror-thin-v1"}
    }
  ]
}
JSON
  MANIFEST_SHA=$(sha256sum "$MANIFEST_FILE" | awk '{print $1}')
  MANIFEST_SIZE=$(stat -c%s "$MANIFEST_FILE")
  cat > "$OCI_DIR/index.json" <<JSON
{
  "schemaVersion": 2,
  "manifests": [
    {
      "mediaType": "application/vnd.oci.image.manifest.v1+json",
      "digest": "sha256:$MANIFEST_SHA",
      "size": $MANIFEST_SIZE,
      "annotations": {"org.opencontainers.image.ref.name": "mirror-thin-v1"}
    }
  ]
}
JSON
fi
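
# The resulting layout (oci-layout, index.json, blobs/sha256/*) is what
# scripts/mirror/verify_oci_layout.py (hash-pinned into the bundle tooling below) is meant to check.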

# 6) Bundle-level manifest for offline/rekor/mirror gaps
python - <<'PY'
import hashlib, json, os, pathlib

stage = pathlib.Path(os.environ['STAGE'])
out = stage.parent
root = stage.parents[3]
created = os.environ['CREATED']
tenant = os.environ['TENANT_SCOPE']
environment = os.environ['ENV_SCOPE']
chunk = int(os.environ['CHUNK_SIZE'])
fresh = int(os.environ['CHECKPOINT_FRESHNESS'])
pq = os.environ.get('PQ_CO_SIGN_REQUIRED', '0') == '1'
sign_key = os.environ.get('SIGN_KEY')
sign_key_id = os.environ.get('SIGN_KEY_ID', 'pending')

def sha(path: pathlib.Path) -> str:
    h = hashlib.sha256()
    with path.open('rb') as f:
        for chunk in iter(lambda: f.read(8192), b''):
            h.update(chunk)
    return h.hexdigest()

manifest_path = out / 'mirror-thin-v1.manifest.json'
tar_path = out / 'mirror-thin-v1.tar.gz'
time_anchor = stage / 'layers' / 'time-anchor.json'
time_anchor_dsse = out / 'time-anchor.dsse.json'
transport_plan = stage / 'layers' / 'transport-plan.json'
rekor_policy = stage / 'layers' / 'rekor-policy.json'
mirror_policy = stage / 'layers' / 'mirror-policy.json'
offline_policy = stage / 'layers' / 'offline-kit-policy.json'
artifact_hashes = stage / 'layers' / 'artifact-hashes.json'
oci_index = out / 'oci' / 'index.json'

tooling = {
    'make_thin_v1_sh': sha(root / 'src' / 'Mirror' / 'StellaOps.Mirror.Creator' / 'make-thin-v1.sh'),
    'sign_script': sha(root / 'scripts' / 'mirror' / 'sign_thin_bundle.py'),
    'verify_script': sha(root / 'scripts' / 'mirror' / 'verify_thin_bundle.py'),
    'verify_oci': sha(root / 'scripts' / 'mirror' / 'verify_oci_layout.py'),
}

bundle = {
    'bundle': 'mirror-thin-v1',
    'version': '1.0.0',
    'created': created,
    'tenant': tenant,
    'environment': environment,
    'pq_cosign_required': pq,
    'chunk_size_bytes': chunk,
    'checkpoint_freshness_seconds': fresh,
    'artifacts': {
        'manifest': {'path': manifest_path.name, 'sha256': sha(manifest_path)},
        'tarball': {'path': tar_path.name, 'sha256': sha(tar_path)},
        'manifest_dsse': {'path': 'mirror-thin-v1.manifest.dsse.json', 'sha256': None},
        'bundle_meta': {'path': 'mirror-thin-v1.bundle.json', 'sha256': None},
        'bundle_dsse': {'path': 'mirror-thin-v1.bundle.dsse.json', 'sha256': None},
        'time_anchor': {'path': time_anchor.name, 'sha256': sha(time_anchor)},
        'time_anchor_dsse': {'path': time_anchor_dsse.name, 'sha256': sha(time_anchor_dsse)} if time_anchor_dsse.exists() else None,
        'transport_plan': {'path': transport_plan.name, 'sha256': sha(transport_plan)},
        'rekor_policy': {'path': rekor_policy.name, 'sha256': sha(rekor_policy)},
        'mirror_policy': {'path': mirror_policy.name, 'sha256': sha(mirror_policy)},
        'offline_policy': {'path': offline_policy.name, 'sha256': sha(offline_policy)},
        'artifact_hashes': {'path': artifact_hashes.name, 'sha256': sha(artifact_hashes)},
        'oci_index': {'path': 'oci/index.json', 'sha256': sha(oci_index)} if oci_index.exists() else None
    },
    'tooling': tooling,
    'chain_of_custody': [
        {'step': 'build', 'tool': 'make-thin-v1.sh', 'sha256': tooling['make_thin_v1_sh']},
        {'step': 'sign', 'tool': 'sign_thin_bundle.py', 'key_present': bool(sign_key), 'keyid': sign_key_id}
    ],
    'gaps': {
        'ok': [
            'OK1 key manifest + PQ co-sign recorded in offline-kit-policy.json',
            'OK2 tool hashing captured in bundle_meta.tooling',
            'OK3 DSSE top-level manifest planned via bundle.dsse',
            'OK4 checkpoint freshness enforced with checkpoint_freshness_seconds',
            'OK5 deterministic packaging flags recorded in offline-kit-policy.json',
            'OK6 scan/VEX/policy/graph hashes captured in artifact-hashes.json',
            'OK7 time anchor bundled as layers/time-anchor.json',
            'OK8 transport + chunking defined in transport-plan.json',
            'OK9 tenant/environment scoping recorded in bundle meta',
            'OK10 scripted verify path is scripts/mirror/verify_thin_bundle.py'
        ],
        'rk': [
            'RK1 enforce dsse/hashedrekord policy in rekor-policy.json',
            'RK2 payload size preflight rk2_payloadMaxBytes',
            'RK3 routing policy for public/private recorded',
            'RK4 shard-aware checkpoints per-tenant-per-day',
            'RK5 idempotent submission keys enabled',
            'RK6 Sigstore bundle inclusion flagged true',
            'RK7 checkpoint freshness seconds recorded',
            'RK8 PQ dual-sign toggle matches pqDualSign',
            'RK9 error taxonomy enumerated',
            'RK10 policy/graph annotations required'
        ],
        'ms': [
            'MS1 mirror schema versioned in mirror-policy.json',
            'MS2 DSSE/TUF rotation days recorded',
            'MS3 delta spec includes tombstones + base hash',
            'MS4 time-anchor freshness enforced',
            'MS5 tenant/env scoping captured',
            'MS6 distribution integrity rules documented',
            'MS7 chunking/size rules recorded',
            'MS8 verify script pinned',
            'MS9 metrics/alerts required',
            'MS10 semver/changelog noted'
        ]
    }
}

bundle_path = out / 'mirror-thin-v1.bundle.json'
bundle_path.write_text(json.dumps(bundle, indent=2, sort_keys=True) + '\n', encoding='utf-8')
PY

pushd "$OUT" >/dev/null
sha256sum mirror-thin-v1.bundle.json > mirror-thin-v1.bundle.json.sha256
popd >/dev/null
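
# mirror-thin-v1.bundle.json top-level keys: bundle, version, created, tenant, environment,
# pq_cosign_required, chunk_size_bytes, checkpoint_freshness_seconds, artifacts, tooling,
# chain_of_custody, gaps (OK/RK/MS coverage notes). The manifest_dsse/bundle_meta/bundle_dsse
# digests are written as null at this point.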

# 7) Optional signing (DSSE + TUF) if SIGN_KEY is provided
if [[ -n "${SIGN_KEY:-}" ]]; then
  mkdir -p "$OUT/tuf/keys"
  python "$ROOT/scripts/mirror/sign_thin_bundle.py" \
    --key "$SIGN_KEY" \
    --manifest "$OUT/mirror-thin-v1.manifest.json" \
    --tar "$OUT/mirror-thin-v1.tar.gz" \
    --tuf-dir "$OUT/tuf" \
    --bundle "$OUT/mirror-thin-v1.bundle.json"
fi
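
# Presumably sign_thin_bundle.py emits the DSSE envelopes named in the bundle meta
# (mirror-thin-v1.manifest.dsse.json, mirror-thin-v1.bundle.dsse.json) and TUF metadata
# under "$OUT/tuf"; this script only pre-creates the tuf/keys directory.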

# 8) Verification
PUBKEY_FLAG=()
if [[ -n "${SIGN_KEY:-}" ]]; then
  CANDIDATE_PUB="${SIGN_KEY%.pem}.pub"
  [[ -f "$CANDIDATE_PUB" ]] && PUBKEY_FLAG=(--pubkey "$CANDIDATE_PUB")
fi
python "$ROOT/scripts/mirror/verify_thin_bundle.py" \
  "$OUT/mirror-thin-v1.manifest.json" \
  "$OUT/mirror-thin-v1.tar.gz" \
  --bundle-meta "$OUT/mirror-thin-v1.bundle.json" \
  --tenant "$TENANT_SCOPE" \
  --environment "$ENV_SCOPE" \
  ${PUBKEY_FLAG[@]+"${PUBKEY_FLAG[@]}"}  # expands to nothing when no pubkey was found

echo "mirror-thin-v1 built at $OUT"
|