Consolidate several modules; fix localization issues; advance product-advisories and QA work.

This commit is contained in:
master
2026-03-05 03:54:22 +02:00
parent 7bafcc3eef
commit 8e1cb9448d
3878 changed files with 72600 additions and 46861 deletions

View File

@@ -0,0 +1,69 @@
#!/usr/bin/env python3
"""Deterministic Rekor error-mode shim used by negative path test lane."""
from __future__ import annotations
from dataclasses import dataclass
@dataclass(frozen=True)
class RekorCase:
    """Immutable description of one simulated Rekor negative-path scenario."""

    # Stable scenario identifier; also used for correlation headers ("corr-<id>").
    case_id: str
    # HTTP status code the simulated Rekor endpoint returns.
    status_code: int
    # Rekor entry type submitted; "unknown" is the unsupported-type scenario.
    entry_type: str
    # Machine-readable error class the verification lane expects to derive.
    expected_code: str
    # Human-readable summary of the failure mode.
    message: str
    # JSON-style response body the simulated endpoint replies with.
    response_body: dict[str, object]
def default_cases() -> list[RekorCase]:
    """Return the fixed catalogue of negative-path Rekor scenarios.

    Rows are (case_id, status_code, entry_type, expected_code, message,
    response_body) in ``RekorCase`` field order.
    """
    rows: list[tuple[str, int, str, str, str, dict[str, object]]] = [
        (
            "oversized-payload-413",
            413,
            "intoto",
            "payload_too_large",
            "payload size exceeds configured limit",
            {"error": "payload too large", "maxBytes": 10_000_000},
        ),
        (
            "unsupported-entry-type-400",
            400,
            "unknown",
            "unsupported_entry_type",
            "unsupported entry type",
            {"error": "unsupported entry type", "entryType": "unknown"},
        ),
        (
            "failed-dependency-424",
            424,
            "intoto",
            "failed_dependency",
            "rekor backend dependency failure",
            {"error": "ledger gap", "reprocessToken": "rekor-gap-001"},
        ),
        (
            "gateway-timeout-504",
            504,
            "intoto",
            "upstream_timeout",
            "rekor upstream timeout",
            {"error": "timeout", "retryAfterSeconds": 30},
        ),
        (
            "accepted-for-reprocess-202",
            202,
            "intoto",
            "reprocess_pending",
            "accepted for asynchronous replay",
            {"status": "accepted", "reprocessToken": "rekor-async-001"},
        ),
    ]
    return [RekorCase(*row) for row in rows]
def simulate_submit(case: RekorCase) -> tuple[int, dict[str, object], dict[str, str]]:
    """Pretend to submit *case* to Rekor; return (status, body, headers).

    The response body is shallow-copied so callers cannot mutate the
    frozen case's canonical payload.
    """
    correlation = f"corr-{case.case_id}"
    response_headers = {
        "x-correlation-id": correlation,
        "content-type": "application/json",
    }
    body = dict(case.response_body)
    return case.status_code, body, response_headers

View File

@@ -0,0 +1,140 @@
#!/usr/bin/env python3
"""Run deterministic Rekor/DSSE negative-path verification suite."""
from __future__ import annotations
import argparse
import hashlib
import json
import pathlib
import tarfile
import time
from rekor_shim import RekorCase, default_cases, simulate_submit
import sys
TOOLS_DIR = pathlib.Path(__file__).resolve().parents[1] / "tools"
sys.path.insert(0, str(TOOLS_DIR))
from emit_artifacts import TestCaseResult, write_junit # noqa: E402
def _classify(case: RekorCase, status: int, body: dict[str, object]) -> tuple[str, str | None]:
reprocess_token = str(body.get("reprocessToken")) if body.get("reprocessToken") else None
if status == 413:
return "payload_too_large", None
if status == 424:
return "failed_dependency", reprocess_token or f"retry-{case.case_id}"
if status == 504:
return "upstream_timeout", f"timeout-{case.case_id}"
if status == 202:
return "reprocess_pending", reprocess_token or f"pending-{case.case_id}"
if status == 400 and case.entry_type == "unknown":
return "unsupported_entry_type", None
return "unexpected_rekor_status", None
def _token(case_id: str) -> str:
return hashlib.sha256(case_id.encode("utf-8")).hexdigest()[:16]
def _write_tar(source_dir: pathlib.Path, tar_path: pathlib.Path) -> None:
tar_path.parent.mkdir(parents=True, exist_ok=True)
with tarfile.open(tar_path, "w:gz") as archive:
for file in sorted(path for path in source_dir.rglob("*") if path.is_file()):
archive.add(file, arcname=file.relative_to(source_dir).as_posix())
def main() -> int:
    """Run every negative-path Rekor case and emit artifacts under --output.

    Writes per-case diagnostic blobs, a gzipped tar of all diagnostics, a
    summary report.json, and a JUnit XML file.

    Returns:
        0 when every case classifies as its expected error class, else 1.
    """
    parser = argparse.ArgumentParser(description="Run Rekor negative path suite.")
    parser.add_argument(
        "--output",
        type=pathlib.Path,
        default=pathlib.Path("out/supply-chain/03-rekor-neg"),
    )
    args = parser.parse_args()
    start = time.perf_counter()
    output = args.output.resolve()
    output.mkdir(parents=True, exist_ok=True)
    diagnostics_root = output / "diagnostics"
    diagnostics_root.mkdir(parents=True, exist_ok=True)
    cases = default_cases()
    junit_cases: list[TestCaseResult] = []
    report_cases: list[dict[str, object]] = []
    failures = 0
    for case in cases:
        case_start = time.perf_counter()
        # Deterministic simulated upstream response for this scenario.
        status, body, headers = simulate_submit(case)
        code, reprocess = _classify(case, status, body)
        expected = case.expected_code
        passed = code == expected
        if not passed:
            failures += 1
        # Prefer the classifier-supplied token; fall back to a hash-derived one.
        case_token = reprocess or _token(case.case_id)
        diagnostic = {
            "caseId": case.case_id,
            "upstream": {
                "statusCode": status,
                "body": body,
                "headers": headers,
            },
            "machineReadableErrorClass": code,
            "expectedErrorClass": expected,
            "reprocessToken": case_token,
        }
        case_dir = diagnostics_root / case.case_id
        case_dir.mkdir(parents=True, exist_ok=True)
        # sort_keys keeps the blob byte-stable run to run.
        (case_dir / "diagnostic_blob.json").write_text(
            json.dumps(diagnostic, sort_keys=True, indent=2) + "\n",
            encoding="utf-8",
        )
        report_cases.append(
            {
                "caseId": case.case_id,
                "statusCode": status,
                "entryType": case.entry_type,
                "machineReadableErrorClass": code,
                "expectedErrorClass": expected,
                "reprocessToken": case_token,
                "passed": passed,
            }
        )
        junit_cases.append(
            TestCaseResult(
                suite="03-rekor-neg",
                name=case.case_id,
                passed=passed,
                duration_seconds=time.perf_counter() - case_start,
                failure_message=None if passed else f"expected={expected} actual={code}",
            )
        )
    # Bundle all per-case diagnostics into one archive for CI artifact upload.
    _write_tar(diagnostics_root, output / "rekor_negative_cases.tar.gz")
    report = {
        "durationSeconds": round(time.perf_counter() - start, 4),
        "failures": failures,
        "cases": report_cases,
        "machineReadableErrorClasses": sorted(
            {
                "payload_too_large",
                "unsupported_entry_type",
                "failed_dependency",
                "upstream_timeout",
                "reprocess_pending",
            }
        ),
    }
    (output / "report.json").write_text(json.dumps(report, sort_keys=True, indent=2) + "\n", encoding="utf-8")
    write_junit(output / "junit.xml", junit_cases)
    return 0 if failures == 0 else 1
if __name__ == "__main__":
    # Propagate main()'s return value to the shell as the process exit code.
    raise SystemExit(main())