Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
SDK Publish & Sign / sdk-publish (push) Has been cancelled
sdk-generator-smoke / sdk-smoke (push) Has been cancelled
src/Bench/StellaOps.Bench/Determinism/.gitignore (new file, +2 lines, vendored)
@@ -0,0 +1,2 @@
results/
__pycache__/
@@ -0,0 +1,11 @@
{
  "graph": {
    "nodes": [
      {"id": "pkg:pypi/demo-lib@1.0.0", "type": "package"},
      {"id": "pkg:generic/demo-cli@0.4.2", "type": "package"}
    ],
    "edges": [
      {"from": "pkg:generic/demo-cli@0.4.2", "to": "pkg:pypi/demo-lib@1.0.0", "type": "depends_on"}
    ]
  }
}
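As a brief illustration (not part of the commit), the sample graph above can be loaded and its dependency edges enumerated with a few lines of Python; the path assumes the fixture lands at inputs/graphs/sample-graph.json under the harness directory, which is where the tests further below look for it:

import json
from pathlib import Path

# Load the sample graph fixture and print each depends_on relationship.
doc = json.loads(Path("inputs/graphs/sample-graph.json").read_text(encoding="utf-8"))
for edge in doc["graph"]["edges"]:
    print(f'{edge["from"]} -> {edge["to"]} ({edge["type"]})')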
@@ -0,0 +1 @@
{"event":"call","func":"demo","module":"demo-lib","ts":"2025-11-01T00:00:00Z"}
@@ -1,3 +0,0 @@
38453c9c0e0a90d22d7048d3201bf1b5665eb483e6682db1a7112f8e4f4fa1e6 configs/scanners.json
577f932bbb00dbd596e46b96d5fbb9561506c7730c097e381a6b34de40402329 inputs/sboms/sample-spdx.json
1b54ce4087800cfe1d5ac439c10a1f131b7476b2093b79d8cd0a29169314291f inputs/vex/sample-openvex.json
@@ -1,21 +0,0 @@
scanner,sbom,vex,mode,run,hash,finding_count
mock,sample-spdx.json,sample-openvex.json,canonical,0,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,0,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,1,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,1,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,2,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,2,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,3,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,3,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,4,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,4,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,5,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,5,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,6,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,6,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,7,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,7,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,8,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,8,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,canonical,9,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
mock,sample-spdx.json,sample-openvex.json,shuffled,9,d1cc5f0d22e863e457af589fb2c6c1737b67eb586338bccfe23ea7908c8a8b18,2
@@ -1,3 +0,0 @@
{
  "determinism_rate": 1.0
}
src/Bench/StellaOps.Bench/Determinism/run_reachability.py (new file, +94 lines)
@@ -0,0 +1,94 @@
#!/usr/bin/env python3
"""
Reachability dataset hash helper for optional BENCH-DETERMINISM reachability runs.
- Computes deterministic hashes for graph JSON and runtime NDJSON inputs.
- Emits `results-reach.csv` and `dataset.sha256` in the chosen output directory.
"""
from __future__ import annotations

import argparse
import csv
import hashlib
import json
import glob
from pathlib import Path
from typing import Iterable, List


def sha256_bytes(data: bytes) -> str:
    return hashlib.sha256(data).hexdigest()


def expand_files(patterns: Iterable[str]) -> List[Path]:
    files: List[Path] = []
    for pattern in patterns:
        if not pattern:
            continue
        for path_str in sorted(glob.glob(pattern)):
            path = Path(path_str)
            if path.is_file():
                files.append(path)
    return files


def hash_files(paths: List[Path]) -> List[tuple[str, str]]:
    rows: List[tuple[str, str]] = []
    for path in paths:
        rows.append((path.name, sha256_bytes(path.read_bytes())))
    return rows


def write_manifest(paths: List[Path], manifest_path: Path) -> None:
    lines = []
    for path in sorted(paths, key=lambda p: str(p)):
        digest = sha256_bytes(path.read_bytes())
        try:
            rel = path.resolve().relative_to(Path.cwd().resolve())
        except ValueError:
            rel = path.resolve()
        lines.append(f"{digest} {rel.as_posix()}\n")
    manifest_path.parent.mkdir(parents=True, exist_ok=True)
    manifest_path.write_text("".join(lines), encoding="utf-8")


def main() -> None:
    parser = argparse.ArgumentParser(description="Reachability dataset hash helper")
    parser.add_argument("--graphs", nargs="*", default=["inputs/graphs/*.json"], help="Glob(s) for graph JSON files")
    parser.add_argument("--runtime", nargs="*", default=["inputs/runtime/*.ndjson", "inputs/runtime/*.ndjson.gz"], help="Glob(s) for runtime NDJSON files")
    parser.add_argument("--output", default="results", help="Output directory")
    args = parser.parse_args()

    graphs = expand_files(args.graphs)
    runtime = expand_files(args.runtime)

    if not graphs:
        raise SystemExit("No graph inputs found; supply --graphs globs")

    output_dir = Path(args.output)
    output_dir.mkdir(parents=True, exist_ok=True)

    dataset_manifest_files = graphs + runtime
    write_manifest(dataset_manifest_files, output_dir / "dataset.sha256")

    csv_path = output_dir / "results-reach.csv"
    fieldnames = ["type", "file", "sha256"]
    with csv_path.open("w", encoding="utf-8", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=fieldnames)
        writer.writeheader()
        for name, digest in hash_files(graphs):
            writer.writerow({"type": "graph", "file": name, "sha256": digest})
        for name, digest in hash_files(runtime):
            writer.writerow({"type": "runtime", "file": name, "sha256": digest})

    summary = {
        "graphs": len(graphs),
        "runtime": len(runtime),
        "manifest": "dataset.sha256",
    }
    (output_dir / "results-reach.json").write_text(json.dumps(summary, indent=2), encoding="utf-8")

    print(f"Wrote {csv_path} with {len(graphs)} graph(s) and {len(runtime)} runtime file(s)")


if __name__ == "__main__":
    main()
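A minimal usage sketch (illustrative, not part of the commit): besides the CLI entry point, the helper's functions can be driven programmatically from the harness directory, assuming the sample inputs above are in place:

from pathlib import Path

import run_reachability

# Expand the default input globs, write the dataset manifest, and print each digest.
graphs = run_reachability.expand_files(["inputs/graphs/*.json"])
runtime = run_reachability.expand_files(["inputs/runtime/*.ndjson"])

out_dir = Path("results")
out_dir.mkdir(parents=True, exist_ok=True)
run_reachability.write_manifest(graphs + runtime, out_dir / "dataset.sha256")

for name, digest in run_reachability.hash_files(graphs + runtime):
    print(name, digest)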
@@ -0,0 +1,33 @@
import sys
from pathlib import Path
from tempfile import TemporaryDirectory
import unittest

HARNESS_DIR = Path(__file__).resolve().parents[1]
sys.path.insert(0, str(HARNESS_DIR))

import run_reachability  # noqa: E402


class ReachabilityBenchTests(unittest.TestCase):
    def setUp(self):
        self.graphs = [HARNESS_DIR / "inputs" / "graphs" / "sample-graph.json"]
        self.runtime = [HARNESS_DIR / "inputs" / "runtime" / "sample-runtime.ndjson"]

    def test_manifest_includes_files(self):
        with TemporaryDirectory() as tmp:
            out_dir = Path(tmp)
            manifest_path = out_dir / "dataset.sha256"
            run_reachability.write_manifest(self.graphs + self.runtime, manifest_path)
            text = manifest_path.read_text(encoding="utf-8")
            self.assertIn("sample-graph.json", text)
            self.assertIn("sample-runtime.ndjson", text)

    def test_hash_files(self):
        hashes = dict(run_reachability.hash_files(self.graphs))
        self.assertIn("sample-graph.json", hashes)
        self.assertEqual(len(hashes), 1)


if __name__ == "__main__":
    unittest.main()
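A sketch of running the suite (again illustrative, not part of the commit): the tests put the harness directory on sys.path themselves, so plain unittest discovery is enough, assuming the test module follows unittest's default test_*.py naming:

import unittest

# Discover and run the reachability bench tests from the directory containing them.
suite = unittest.defaultTestLoader.discover(".", pattern="test_*.py")
unittest.TextTestRunner(verbosity=2).run(suite)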