| |
| """Package release data directories into per-community tar.zst archives.""" |
|
|
| from __future__ import annotations |
|
|
| import argparse |
| import hashlib |
| import json |
| import shutil |
| import subprocess |
| from datetime import datetime, timezone |
| from pathlib import Path |
|
|
|
|
# Repository layout anchors: this script lives one directory below the
# Hugging Face dataset root, so parents[1] is the dataset root itself.
HF_DIR = Path(__file__).resolve().parents[1]
# Per-community release data directories (data/community_<N>/...).
DATA_DIR = HF_DIR / "data"
# Destination for the generated tar.zst archives.
ARCHIVES_DIR = HF_DIR / "archives"
# Manifest describing every archive (path, size, sha256).
ARCHIVES_MANIFEST = HF_DIR / "archives_manifest.json"
# Top-level release manifest; updated in place to point at the archives manifest.
RELEASE_MANIFEST = HF_DIR / "release_manifest.json"
|
|
|
|
def community_key(path: Path) -> int:
    """Return the numeric suffix of a ``community_<N>`` directory for sorting."""
    suffix = path.name.rsplit("_", 1)[1]
    return int(suffix)
|
|
|
|
def sha256_file(path: Path, chunk_size: int = 1024 * 1024 * 16) -> str:
    """Compute the hex SHA-256 of *path*, reading it in *chunk_size*-byte pieces."""
    hasher = hashlib.sha256()
    with path.open("rb") as stream:
        while chunk := stream.read(chunk_size):
            hasher.update(chunk)
    return hasher.hexdigest()
|
|
|
|
def load_json(path: Path) -> dict:
    """Parse *path* as UTF-8 JSON; a missing file yields an empty dict."""
    if path.exists():
        return json.loads(path.read_text(encoding="utf-8"))
    return {}
|
|
|
|
def write_json(path: Path, data: object) -> None:
    """Serialize *data* to *path* as pretty-printed UTF-8 JSON with a trailing newline."""
    serialized = json.dumps(data, ensure_ascii=False, indent=2)
    path.write_text(serialized + "\n", encoding="utf-8")
|
|
|
|
| def archive_is_current(record: dict | None, archive_path: Path) -> bool: |
| return ( |
| bool(record) |
| and archive_path.exists() |
| and record.get("size_bytes") == archive_path.stat().st_size |
| and bool(record.get("sha256")) |
| ) |
|
|
|
|
def build_archive(community_dir: Path, archive_path: Path, compression_level: int) -> None:
    """Create ``archive_path`` as a tar.zst of ``community_dir``, atomically.

    tar is invoked from HF_DIR with repo-relative paths so archive members
    are stored as ``data/community_N/...``.  The archive is written to a
    ``.tmp`` sibling first and renamed into place only on success, so a
    partial archive never appears at ``archive_path``.

    Raises:
        subprocess.CalledProcessError: if tar (or the zstd pipe) fails.
    """
    archive_path.parent.mkdir(parents=True, exist_ok=True)
    tmp_path = archive_path.with_suffix(archive_path.suffix + ".tmp")
    tmp_path.unlink(missing_ok=True)

    # tar streams through zstd; -T0 uses all available cores.
    compressor = f"zstd -T0 -{compression_level}"
    cmd = [
        "tar",
        "--use-compress-program",
        compressor,
        "-cf",
        str(tmp_path.relative_to(HF_DIR)),
        str(community_dir.relative_to(HF_DIR)),
    ]
    try:
        subprocess.run(cmd, cwd=HF_DIR, check=True)
    except BaseException:
        # Fix: previously a failed/interrupted tar left a stale .tmp file
        # behind until the next run. Clean it up and propagate the error.
        tmp_path.unlink(missing_ok=True)
        raise
    tmp_path.replace(archive_path)
|
|
|
|
def main() -> None:
    """Archive every ``data/community_*`` directory and refresh both manifests."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--level", type=int, default=6, help="zstd compression level")
    parser.add_argument("--force", action="store_true", help="rebuild existing archives")
    args = parser.parse_args()

    # tar delegates compression to an external zstd binary; fail fast if absent.
    if not shutil.which("zstd"):
        raise SystemExit("zstd is required but was not found on PATH")

    community_dirs = sorted(
        (path for path in DATA_DIR.glob("community_*") if path.is_dir()),
        key=community_key,
    )
    existing_manifest = load_json(ARCHIVES_MANIFEST)
    existing_records = {
        record["community_id"]: record
        for record in existing_manifest.get("archives", [])
        if "community_id" in record
    }

    records = []
    for index, community_dir in enumerate(community_dirs, start=1):
        community_id = community_dir.name
        archive_path = ARCHIVES_DIR / f"{community_id}.tar.zst"
        existing_record = existing_records.get(community_id)

        print(f"[{index}/{len(community_dirs)}] {community_id}", flush=True)
        rebuilt = args.force or not archive_is_current(existing_record, archive_path)
        if rebuilt:
            build_archive(community_dir, archive_path, args.level)

        size_bytes = archive_path.stat().st_size
        # BUG FIX: the digest was previously reused from the old manifest
        # whenever the *freshly rebuilt* archive happened to match the
        # recorded size (archive_is_current re-evaluated True after a
        # --force rebuild), which could publish a stale sha256. Recompute
        # whenever the archive was actually rebuilt; only reuse the
        # recorded hash for archives we verified as current and untouched.
        if rebuilt:
            digest = sha256_file(archive_path)
        else:
            digest = existing_record["sha256"]
        records.append(
            {
                "community_id": community_id,
                "archive_path": f"archives/{archive_path.name}",
                "extracts_to": f"data/{community_id}",
                "size_bytes": size_bytes,
                "sha256": digest,
            }
        )

    manifest = {
        "created_at": datetime.now(timezone.utc).isoformat(),
        "compression": {
            "format": "tar.zst",
            "zstd_level": args.level,
            "command_template": "tar --use-compress-program 'zstd -T0 -LEVEL' -cf archives/community_N.tar.zst data/community_N",
        },
        "num_archives": len(records),
        "total_size_bytes": sum(record["size_bytes"] for record in records),
        "archives": records,
    }
    write_json(ARCHIVES_MANIFEST, manifest)

    # Point the top-level release manifest at the archives manifest, but only
    # if one already exists (best-effort; this script does not create it).
    release_manifest = load_json(RELEASE_MANIFEST)
    if release_manifest:
        release_manifest["archives_manifest"] = "archives_manifest.json"
        release_manifest["archive_policy"] = (
            "Data is published as one tar.zst archive per release community. "
            "Each archive extracts to the data_path referenced by the question JSON."
        )
        write_json(RELEASE_MANIFEST, release_manifest)

    print(json.dumps(manifest, ensure_ascii=False, indent=2), flush=True)
|
|
|
|
# Standard script entry guard: run only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
|