# git.stella-ops.org/tests/supply-chain/03-rekor-neg/run_negative_suite.py
# (141 lines, 4.6 KiB, Python)
#!/usr/bin/env python3
"""Run deterministic Rekor/DSSE negative-path verification suite."""
from __future__ import annotations
import argparse
import hashlib
import json
import pathlib
import tarfile
import time
from rekor_shim import RekorCase, default_cases, simulate_submit
import sys
TOOLS_DIR = pathlib.Path(__file__).resolve().parents[1] / "tools"
sys.path.insert(0, str(TOOLS_DIR))
from emit_artifacts import TestCaseResult, write_junit # noqa: E402
def _classify(case: RekorCase, status: int, body: dict[str, object]) -> tuple[str, str | None]:
reprocess_token = str(body.get("reprocessToken")) if body.get("reprocessToken") else None
if status == 413:
return "payload_too_large", None
if status == 424:
return "failed_dependency", reprocess_token or f"retry-{case.case_id}"
if status == 504:
return "upstream_timeout", f"timeout-{case.case_id}"
if status == 202:
return "reprocess_pending", reprocess_token or f"pending-{case.case_id}"
if status == 400 and case.entry_type == "unknown":
return "unsupported_entry_type", None
return "unexpected_rekor_status", None
def _token(case_id: str) -> str:
return hashlib.sha256(case_id.encode("utf-8")).hexdigest()[:16]
def _write_tar(source_dir: pathlib.Path, tar_path: pathlib.Path) -> None:
tar_path.parent.mkdir(parents=True, exist_ok=True)
with tarfile.open(tar_path, "w:gz") as archive:
for file in sorted(path for path in source_dir.rglob("*") if path.is_file()):
archive.add(file, arcname=file.relative_to(source_dir).as_posix())
def main() -> int:
    """Run every negative-path Rekor case and emit artifacts under --output.

    Produces, inside the output directory:
      * ``diagnostics/<case_id>/diagnostic_blob.json`` per case,
      * ``rekor_negative_cases.tar.gz`` bundling all diagnostics,
      * ``report.json`` with per-case results and the known error classes,
      * ``junit.xml`` via the shared ``write_junit`` helper.

    Returns:
        Process exit status: 0 when every case matched its expected error
        class, 1 otherwise.
    """
    parser = argparse.ArgumentParser(description="Run Rekor negative path suite.")
    parser.add_argument(
        "--output",
        type=pathlib.Path,
        default=pathlib.Path("out/supply-chain/03-rekor-neg"),
    )
    args = parser.parse_args()
    start = time.perf_counter()  # wall-clock start for the whole suite
    output = args.output.resolve()
    output.mkdir(parents=True, exist_ok=True)
    diagnostics_root = output / "diagnostics"
    diagnostics_root.mkdir(parents=True, exist_ok=True)
    cases = default_cases()
    junit_cases: list[TestCaseResult] = []
    report_cases: list[dict[str, object]] = []
    failures = 0
    for case in cases:
        case_start = time.perf_counter()
        # Simulated Rekor submission: HTTP status, JSON body, response headers.
        status, body, headers = simulate_submit(case)
        code, reprocess = _classify(case, status, body)
        expected = case.expected_code
        passed = code == expected
        if not passed:
            failures += 1
        # Prefer the token chosen by _classify; otherwise fall back to a
        # deterministic hash of the case id so every case has a token.
        case_token = reprocess or _token(case.case_id)
        diagnostic = {
            "caseId": case.case_id,
            "upstream": {
                "statusCode": status,
                "body": body,
                "headers": headers,
            },
            "machineReadableErrorClass": code,
            "expectedErrorClass": expected,
            "reprocessToken": case_token,
        }
        case_dir = diagnostics_root / case.case_id
        case_dir.mkdir(parents=True, exist_ok=True)
        (case_dir / "diagnostic_blob.json").write_text(
            json.dumps(diagnostic, sort_keys=True, indent=2) + "\n",
            encoding="utf-8",
        )
        report_cases.append(
            {
                "caseId": case.case_id,
                "statusCode": status,
                "entryType": case.entry_type,
                "machineReadableErrorClass": code,
                "expectedErrorClass": expected,
                "reprocessToken": case_token,
                "passed": passed,
            }
        )
        junit_cases.append(
            TestCaseResult(
                suite="03-rekor-neg",
                name=case.case_id,
                passed=passed,
                duration_seconds=time.perf_counter() - case_start,
                failure_message=None if passed else f"expected={expected} actual={code}",
            )
        )
    # Bundle all per-case diagnostic blobs into a single archive.
    _write_tar(diagnostics_root, output / "rekor_negative_cases.tar.gz")
    report = {
        "durationSeconds": round(time.perf_counter() - start, 4),
        "failures": failures,
        "cases": report_cases,
        # All classes _classify can emit except the catch-all
        # "unexpected_rekor_status"; sorted for stable output.
        "machineReadableErrorClasses": sorted(
            {
                "payload_too_large",
                "unsupported_entry_type",
                "failed_dependency",
                "upstream_timeout",
                "reprocess_pending",
            }
        ),
    }
    (output / "report.json").write_text(json.dumps(report, sort_keys=True, indent=2) + "\n", encoding="utf-8")
    write_junit(output / "junit.xml", junit_cases)
    return 0 if failures == 0 else 1


if __name__ == "__main__":
    raise SystemExit(main())