Fill in gaps, fixes, UI restructuring
This commit is contained in:
125
.gitea/workflows/templates/dsse-attest-verify-check.yml
Normal file
125
.gitea/workflows/templates/dsse-attest-verify-check.yml
Normal file
@@ -0,0 +1,125 @@
# =============================================================================
# dsse-attest-verify-check.yml
# Sprint: SPRINT_20260219_011 (CIAP-02)
# Description: Signs SBOM with DSSE, verifies attestation, validates Rekor proof
# =============================================================================
#
# This workflow creates a DSSE attestation for an SBOM, verifies it, and
# optionally validates the Rekor transparency log inclusion proof.
#
# Supports both keyless (Fulcio/OIDC) and keyed (cosign key) signing modes.
#
# =============================================================================

name: DSSE Attest + Verify + Rekor Check

on:
  workflow_call:
    inputs:
      subject_ref:
        description: 'OCI image reference (registry/repo@sha256:...)'
        required: true
        type: string
      predicate_path:
        description: 'Path to the DSSE predicate JSON file'
        required: true
        type: string
      signing_mode:
        description: 'Signing mode: keyless (Fulcio/OIDC) or key (cosign key)'
        required: false
        type: string
        default: 'keyless'
      public_key_path:
        description: 'Path to cosign public key PEM (required for key mode)'
        required: false
        type: string
      predicate_type:
        description: 'Predicate type URI for the attestation'
        required: false
        type: string
        default: 'https://cyclonedx.org/bom'
      skip_rekor:
        description: 'Skip Rekor transparency log (for air-gapped environments)'
        required: false
        type: boolean
        default: false

jobs:
  attest-and-verify:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      id-token: write  # For OIDC-based keyless signing

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Install cosign
        uses: sigstore/cosign-installer@v3

      - name: Sign attestation
        id: sign
        env:
          # Enables keyless signing on cosign v1; a no-op on v2+.
          COSIGN_EXPERIMENTAL: '1'
        run: |
          # Build the argument list as a bash array. Appending quoted values
          # to a plain string does NOT work: word splitting never removes the
          # embedded quote characters, so they would reach cosign literally.
          SIGN_FLAGS=(--predicate "${{ inputs.predicate_path }}")
          SIGN_FLAGS+=(--type "${{ inputs.predicate_type }}")

          if [ "${{ inputs.signing_mode }}" = "key" ]; then
            # Keyed signing.
            # NOTE(review): `cosign attest --key` expects the *signing*
            # (private) key, not a public key — confirm what public_key_path
            # actually points to in key mode.
            SIGN_FLAGS+=(--key "${{ inputs.public_key_path }}")
          fi

          if [ "${{ inputs.skip_rekor }}" = "true" ]; then
            SIGN_FLAGS+=(--tlog-upload=false)
          fi

          cosign attest "${SIGN_FLAGS[@]}" "${{ inputs.subject_ref }}"

          echo "### Attestation Signed" >> $GITHUB_STEP_SUMMARY
          echo "- Subject: \`${{ inputs.subject_ref }}\`" >> $GITHUB_STEP_SUMMARY
          echo "- Mode: ${{ inputs.signing_mode }}" >> $GITHUB_STEP_SUMMARY
          echo "- Predicate type: \`${{ inputs.predicate_type }}\`" >> $GITHUB_STEP_SUMMARY

      - name: Verify attestation
        id: verify
        run: |
          VERIFY_FLAGS=(--type "${{ inputs.predicate_type }}")

          if [ "${{ inputs.signing_mode }}" = "key" ]; then
            VERIFY_FLAGS+=(--key "${{ inputs.public_key_path }}")
          else
            # Keyless: verify against the Sigstore trust root. The catch-all
            # identity/issuer patterns accept ANY certificate — tighten these
            # for production policy.
            VERIFY_FLAGS+=(--certificate-identity-regexp '.*')
            VERIFY_FLAGS+=(--certificate-oidc-issuer-regexp '.*')
          fi

          # The runner's shell uses `set -e`, so inspecting $? after a failing
          # command is dead code (the step exits first) — branch on the
          # command itself instead.
          if cosign verify-attestation "${VERIFY_FLAGS[@]}" "${{ inputs.subject_ref }}"; then
            echo "Attestation verification: PASS" >> $GITHUB_STEP_SUMMARY
          else
            echo "Attestation verification: FAIL" >> $GITHUB_STEP_SUMMARY
            exit 1
          fi

      - name: Validate Rekor inclusion proof
        if: inputs.skip_rekor != true
        run: |
          # Fetch the Rekor entry for our attestation predicate
          DIGEST=$(sha256sum "${{ inputs.predicate_path }}" | cut -d' ' -f1)

          # Use rekor-cli to search and verify when it is available
          if command -v rekor-cli &> /dev/null; then
            ENTRY=$(rekor-cli search --sha "sha256:${DIGEST}" 2>/dev/null | head -1)
            if [ -n "${ENTRY}" ]; then
              # NOTE(review): `rekor-cli search` prints entry UUIDs — confirm
              # `rekor-cli verify` accepts that value via --entry.
              rekor-cli verify --artifact "${{ inputs.predicate_path }}" --entry "${ENTRY}"
              echo "Rekor inclusion proof: PASS (entry: ${ENTRY})" >> $GITHUB_STEP_SUMMARY
            else
              echo "Rekor entry not found (may be pending)" >> $GITHUB_STEP_SUMMARY
            fi
          else
            echo "rekor-cli not available, skipping Rekor verification" >> $GITHUB_STEP_SUMMARY
          fi
135
.gitea/workflows/templates/sbom-canonicalization-check.yml
Normal file
135
.gitea/workflows/templates/sbom-canonicalization-check.yml
Normal file
@@ -0,0 +1,135 @@
# =============================================================================
# sbom-canonicalization-check.yml
# Sprint: SPRINT_20260219_011 (CIAP-01)
# Description: Validates CycloneDX SBOM and verifies canonical_id determinism
# =============================================================================
#
# This workflow validates an SBOM against the CycloneDX schema, computes
# the canonical_id (sha256 of JCS-canonicalized JSON), and verifies
# that canonicalization is deterministic across runs.
#
# Usage:
#   1. Copy to your project's .gitea/workflows/ directory
#   2. Set BOM_PATH to your SBOM output location
#   3. Optionally set EXPECTED_CANONICAL_ID for regression testing
#
# =============================================================================

name: SBOM Canonicalization Check

on:
  workflow_call:
    inputs:
      bom_path:
        description: 'Path to CycloneDX SBOM JSON file'
        required: true
        type: string
      expected_canonical_id:
        description: 'Expected canonical_id for regression testing (optional)'
        required: false
        type: string
    outputs:
      canonical_id:
        description: 'Computed canonical_id (sha256:<hex>)'
        value: ${{ jobs.canonicalize.outputs.canonical_id }}
      validation_result:
        description: 'Schema validation result (pass/fail)'
        value: ${{ jobs.canonicalize.outputs.validation_result }}

jobs:
  canonicalize:
    runs-on: ubuntu-latest
    outputs:
      canonical_id: ${{ steps.compute.outputs.canonical_id }}
      validation_result: ${{ steps.validate.outputs.result }}

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate CycloneDX schema
        id: validate
        env:
          BOM_PATH: ${{ inputs.bom_path }}
        run: |
          # Validate SBOM against the CycloneDX 1.7 schema.
          # The runner's shell uses `set -e`, so a failing command must be
          # tested directly — a `$?` check after it would never execute.
          if command -v sbom-utility &> /dev/null; then
            if sbom-utility validate -i "${BOM_PATH}" --force; then
              echo "result=pass" >> $GITHUB_OUTPUT
              echo "Schema validation: PASS" >> $GITHUB_STEP_SUMMARY
            else
              echo "result=fail" >> $GITHUB_OUTPUT
              echo "Schema validation: FAIL" >> $GITHUB_STEP_SUMMARY
              exit 1
            fi
          else
            # Fallback: basic JSON Schema validation with ajv
            npx ajv-cli validate -s docs/schemas/cyclonedx-bom-1.7.schema.json -d "${BOM_PATH}" || {
              echo "result=fail" >> $GITHUB_OUTPUT
              exit 1
            }
            echo "result=pass" >> $GITHUB_OUTPUT
          fi

      - name: Compute canonical_id
        id: compute
        env:
          # Pass the path through the environment instead of interpolating it
          # into the Python source: a path containing a quote would otherwise
          # break (or inject into) the script.
          BOM_PATH: ${{ inputs.bom_path }}
        run: |
          # Canonicalize and compute SHA-256.
          # NOTE(review): sorted keys + compact separators approximates
          # RFC 8785 (JCS) but is not fully compliant (number and string
          # serialization rules differ) — confirm before relying on
          # cross-tool hash equality.
          CANONICAL_ID=$(python3 -c "
          import json, hashlib, os
          with open(os.environ['BOM_PATH'], 'rb') as f:
              obj = json.load(f)
          canonical = json.dumps(obj, sort_keys=True, separators=(',', ':'), ensure_ascii=False).encode('utf-8')
          print('sha256:' + hashlib.sha256(canonical).hexdigest())
          ")

          echo "canonical_id=${CANONICAL_ID}" >> $GITHUB_OUTPUT
          echo "### Canonical SBOM ID" >> $GITHUB_STEP_SUMMARY
          echo "\`${CANONICAL_ID}\`" >> $GITHUB_STEP_SUMMARY

      - name: Verify determinism (double-compute)
        env:
          BOM_PATH: ${{ inputs.bom_path }}
        run: |
          # Canonicalize twice in separate interpreter runs and require
          # byte-identical digests.
          compute_hash() {
            python3 -c "
          import json, hashlib, os
          with open(os.environ['BOM_PATH'], 'rb') as f:
              obj = json.load(f)
          canonical = json.dumps(obj, sort_keys=True, separators=(',', ':'), ensure_ascii=False).encode('utf-8')
          print(hashlib.sha256(canonical).hexdigest())
          "
          }

          FIRST=$(compute_hash)
          SECOND=$(compute_hash)

          if [ "${FIRST}" != "${SECOND}" ]; then
            echo "FATAL: Canonicalization is non-deterministic!" >&2
            echo "  Run 1: ${FIRST}" >&2
            echo "  Run 2: ${SECOND}" >&2
            exit 1
          fi

          echo "Determinism check: PASS (hash=${FIRST})" >> $GITHUB_STEP_SUMMARY

      - name: Regression check (if expected_canonical_id provided)
        if: inputs.expected_canonical_id != ''
        run: |
          ACTUAL="${{ steps.compute.outputs.canonical_id }}"
          EXPECTED="${{ inputs.expected_canonical_id }}"

          if [ "${ACTUAL}" != "${EXPECTED}" ]; then
            echo "REGRESSION: canonical_id changed!" >&2
            echo "  Expected: ${EXPECTED}" >&2
            echo "  Actual:   ${ACTUAL}" >&2
            echo "### Regression Detected" >> $GITHUB_STEP_SUMMARY
            echo "Expected: \`${EXPECTED}\`" >> $GITHUB_STEP_SUMMARY
            echo "Actual: \`${ACTUAL}\`" >> $GITHUB_STEP_SUMMARY
            exit 1
          fi

          echo "Regression check: PASS" >> $GITHUB_STEP_SUMMARY
167
.gitea/workflows/templates/vex-mapping-check.yml
Normal file
167
.gitea/workflows/templates/vex-mapping-check.yml
Normal file
@@ -0,0 +1,167 @@
# =============================================================================
# vex-mapping-check.yml
# Sprint: SPRINT_20260219_011 (CIAP-03)
# Description: Validates VEX documents and verifies target artifact matching
# =============================================================================
#
# This workflow validates OpenVEX or CycloneDX VEX documents against their
# schemas, asserts required fields are present and valid, and optionally
# verifies target artifact matches a known canonical_id.
#
# =============================================================================

name: VEX Mapping Check

on:
  workflow_call:
    inputs:
      vex_path:
        description: 'Path to VEX document (JSON)'
        required: true
        type: string
      vex_format:
        description: 'VEX format: openvex or cyclonedx'
        required: false
        type: string
        default: 'openvex'
      canonical_id:
        description: 'Expected canonical_id of the target artifact (optional)'
        required: false
        type: string
      schema_path:
        description: 'Path to VEX JSON schema (optional, uses bundled schemas by default)'
        required: false
        type: string

jobs:
  validate-vex:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Validate VEX schema
        id: validate
        env:
          VEX_FILE: ${{ inputs.vex_path }}
          FORMAT: ${{ inputs.vex_format }}
          SCHEMA_OVERRIDE: ${{ inputs.schema_path }}
        run: |
          # Select schema: an explicit override wins, otherwise pick the
          # bundled schema matching the declared format.
          if [ -n "${SCHEMA_OVERRIDE}" ]; then
            SCHEMA="${SCHEMA_OVERRIDE}"
          elif [ "${FORMAT}" = "openvex" ]; then
            SCHEMA="docs/schemas/openvex-0.2.0.schema.json"
          else
            SCHEMA="docs/schemas/cyclonedx-bom-1.7.schema.json"
          fi

          # Validate (skip gracefully when the schema file is absent).
          # An if/else is used rather than `cmd && {..} || {..}`: with the
          # latter, a failure inside the success branch would also run the
          # failure branch.
          if [ -f "${SCHEMA}" ]; then
            if npx ajv-cli validate -s "${SCHEMA}" -d "${VEX_FILE}"; then
              echo "Schema validation: PASS" >> $GITHUB_STEP_SUMMARY
            else
              echo "Schema validation: FAIL" >> $GITHUB_STEP_SUMMARY
              exit 1
            fi
          else
            echo "Schema file not found: ${SCHEMA}, skipping schema validation" >> $GITHUB_STEP_SUMMARY
          fi

      - name: Assert required VEX fields
        env:
          # Pass values through the environment rather than interpolating
          # them into the Python source (quote-safe, injection-safe).
          VEX_FILE: ${{ inputs.vex_path }}
          FORMAT: ${{ inputs.vex_format }}
        run: |
          python3 - <<'PY'
          import json, os, sys

          with open(os.environ['VEX_FILE']) as f:
              vex = json.load(f)

          errors = []
          format_name = os.environ['FORMAT']

          if format_name == 'openvex':
              # OpenVEX: every statement needs a valid status, a vulnerability,
              # and at least one product target.
              if 'statements' not in vex:
                  errors.append('Missing required field: statements')
              else:
                  for i, stmt in enumerate(vex['statements']):
                      if 'status' not in stmt:
                          errors.append(f'Statement [{i}]: missing status')
                      elif stmt['status'] not in ('affected', 'not_affected', 'fixed', 'under_investigation'):
                          errors.append(f'Statement [{i}]: invalid status: {stmt["status"]}')
                      if 'vulnerability' not in stmt:
                          errors.append(f'Statement [{i}]: missing vulnerability')
                      if 'product' not in stmt and 'products' not in stmt:
                          errors.append(f'Statement [{i}]: missing product or products')
          else:
              # CycloneDX VEX (embedded in SBOM vulnerabilities)
              vulns = vex.get('vulnerabilities', [])
              if not vulns:
                  errors.append('No vulnerabilities found in CycloneDX VEX')
              for i, vuln in enumerate(vulns):
                  analysis = vuln.get('analysis', {})
                  state = analysis.get('state')
                  if not state:
                      errors.append(f'Vulnerability [{i}] ({vuln.get("id", "?")}): missing analysis.state')
                  elif state not in ('resolved', 'resolved_with_pedigree', 'exploitable', 'in_triage', 'false_positive', 'not_affected'):
                      errors.append(f'Vulnerability [{i}]: invalid analysis.state: {state}')

          if errors:
              print('VEX field validation FAILED:', file=sys.stderr)
              for e in errors:
                  print(f'  - {e}', file=sys.stderr)
              sys.exit(1)
          print(f'VEX field validation: PASS ({format_name})')
          PY

          echo "VEX field assertions: PASS" >> $GITHUB_STEP_SUMMARY

      - name: Verify target canonical_id match
        if: inputs.canonical_id != ''
        env:
          VEX_FILE: ${{ inputs.vex_path }}
          FORMAT: ${{ inputs.vex_format }}
          EXPECTED_ID: ${{ inputs.canonical_id }}
        run: |
          python3 - <<'PY'
          import json, os, sys

          with open(os.environ['VEX_FILE']) as f:
              vex = json.load(f)

          expected = os.environ['EXPECTED_ID']
          bare_digest = expected.replace('sha256:', '')
          format_name = os.environ['FORMAT']
          found_match = False

          if format_name == 'openvex':
              for stmt in vex.get('statements', []):
                  product = stmt.get('product', '')
                  products = stmt.get('products', [])
                  targets = [product] if product else products
                  for t in targets:
                      pid = t if isinstance(t, str) else t.get('@id', '')
                      if bare_digest in pid or pid == expected:
                          found_match = True
                          break
                  if found_match:
                      break
          else:
              # CycloneDX: check the refs listed under each vulnerability's affects
              for vuln in vex.get('vulnerabilities', []):
                  for affects in vuln.get('affects', []):
                      if bare_digest in affects.get('ref', ''):
                          found_match = True
                          break
                  if found_match:
                      break

          if not found_match:
              print(f'WARNING: canonical_id {expected} not found in VEX targets', file=sys.stderr)
              print('This may indicate the VEX document does not apply to the expected artifact')
              # Warning only, not a hard failure
          else:
              print('Target canonical_id match: PASS')
          PY

          echo "Target artifact check: completed" >> $GITHUB_STEP_SUMMARY
Reference in New Issue
Block a user