"""Verify the built zkml-audit-benchmark HF dataset.

Checks:
1. Every file in MANIFEST.json exists and has the recorded SHA256.
2. Every artifact JSON validates against the v2 schema (if jsonschema is installed).
3. Every artifact's pair_id exists in pairs.parquet.
4. Parquet files load correctly via pyarrow.

Exits 0 if all checks pass, 1 otherwise.

Usage (from the hf-dataset/ directory):

    python scripts/verify_dataset.py
"""

from __future__ import annotations

import hashlib
import json
import sys
from pathlib import Path

try:
    import pyarrow.parquet as pq
except ImportError:
    sys.exit("pyarrow is required: pip install pyarrow")


def sha256_file(path: Path) -> str:
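    """Return the SHA256 hex digest of path, reading in 64 KiB chunks."""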
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 16), b""):
            h.update(chunk)
    return h.hexdigest()


def verify(hf_root: Path) -> bool:
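    """Run all checks against the dataset rooted at hf_root; return True only if every check passes."""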
    ok = True

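    # Check 1: every file listed in MANIFEST.json exists with its recorded SHA256.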
    manifest_path = hf_root / "MANIFEST.json"
    if not manifest_path.exists():
        print("FAIL: MANIFEST.json not found")
        return False

    with open(manifest_path, "r", encoding="utf-8") as f:
        manifest = json.load(f)

    entries = manifest.get("files", [])
    print(f"Checking {len(entries)} manifest entries...")

    for entry in entries:
        fpath = hf_root / entry["path"]
        if not fpath.exists():
            print(f" FAIL: missing {entry['path']}")
            ok = False
            continue
        actual = sha256_file(fpath)
        if actual != entry["sha256"]:
            print(f" FAIL: sha256 mismatch for {entry['path']}")
            print(f" expected: {entry['sha256']}")
            print(f" actual: {actual}")
            ok = False

    if ok:
        print(f" OK: all {len(entries)} files match")

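    # Check 2: validate each artifact JSON against the v2 schema (skipped when jsonschema is absent).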
    schema_path = hf_root / "schema" / "artifact.v2.schema.json"
    try:
        import jsonschema
    except ImportError:
        jsonschema = None

    if jsonschema is None:
        print(" SKIP: jsonschema not installed; skipping artifact schema validation")
    elif not schema_path.exists():
        print(" FAIL: schema/artifact.v2.schema.json not found")
        ok = False
    else:
        with open(schema_path, "r", encoding="utf-8") as f:
            schema = json.load(f)

        artifact_dirs = [d for d in (hf_root / "artifacts").iterdir() if d.is_dir()]
        art_count = 0
        for adir in sorted(artifact_dirs):
            for af in sorted(adir.glob("*.json")):
                with open(af, "r", encoding="utf-8") as f:
                    art = json.load(f)
                aid = art.get("artifact_id", af.stem)
                try:
                    jsonschema.validate(art, schema)
                except jsonschema.ValidationError as e:
                    print(f" FAIL: {aid}: {e.message}")
                    ok = False
                art_count += 1

        print(f" OK: {art_count} artifacts validated against v2 schema")

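    # Checks 3 and 4: parquet files load via pyarrow and pair_ids line up across tables.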
    pairs_path = hf_root / "data" / "pairs.parquet"
    artifacts_path = hf_root / "data" / "artifacts.parquet"

    if not pairs_path.exists():
        print("FAIL: data/pairs.parquet not found")
        ok = False
    if not artifacts_path.exists():
        print("FAIL: data/artifacts.parquet not found")
        ok = False

    if pairs_path.exists() and artifacts_path.exists():
        pairs_table = pq.read_table(pairs_path)
        artifacts_table = pq.read_table(artifacts_path)

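        # Referential integrity: every artifact row must reference a pair that exists.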
        pair_ids = set(pairs_table.column("pair_id").to_pylist())
        art_pair_ids = set(artifacts_table.column("pair_id").to_pylist())

        orphan = art_pair_ids - pair_ids
        if orphan:
            print(f" FAIL: artifact pair_ids not in pairs table: {orphan}")
            ok = False
        else:
            print(f" OK: {len(pairs_table)} pairs, {len(artifacts_table)} artifacts, all pair_ids valid")

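        # Cross-check each pair's recorded artifact_count against the actual artifact rows,
        # materializing both tables once instead of rescanning them for every pair.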
        expected_counts = {r["pair_id"]: r["artifact_count"] for r in pairs_table.to_pylist()}
        actual_counts: dict[str, int] = {}
        for r in artifacts_table.to_pylist():
            actual_counts[r["pair_id"]] = actual_counts.get(r["pair_id"], 0) + 1
        for pid, expected in expected_counts.items():
            actual = actual_counts.get(pid, 0)
            if expected != actual:
                print(f" FAIL: {pid} artifact_count={expected} but found {actual} artifact rows")
                ok = False

    if ok:
        print("\n✓ All checks passed")
    else:
        print("\n✗ Some checks failed")

    return ok


if __name__ == "__main__":
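    # scripts/verify_dataset.py sits one level below the dataset root, hence parent.parent.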
    hf_root = Path(__file__).resolve().parent.parent
    sys.exit(0 if verify(hf_root) else 1)