"""
Prepares PES2O for release on the HugginFace hub.
Author: Luca Soldaini (@soldni)
"""
import argparse
import json
import re
from contextlib import ExitStack
from functools import partial
from multiprocessing import Manager, Pool, cpu_count, set_start_method
from pathlib import Path
from queue import Queue
from threading import Thread
from time import sleep
from typing import List, Optional, Tuple

import smart_open
from smashed.utils import MultiPath, recursively_list_files
from tqdm import tqdm


def process_single(
io_paths: Tuple[List[MultiPath], MultiPath],
version: str,
pbar_queue: Optional[Queue] = None,
):
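    """
    Concatenate all shards in `all_src` into a single file at `dst`, dropping
    per-document `metadata` and adding `source` (dataset/split) and `version`
    fields to each record. Progress is reported as (files, docs) tuples on
    `pbar_queue`, if one is provided.
    """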
all_src, dst = io_paths
docs_cnt = 0
if dst.is_local:
Path(dst.as_str).parent.mkdir(parents=True, exist_ok=True)
with smart_open.open(dst.as_str, "wt") as out_stream:
for src in all_src:
with smart_open.open(src.as_str, "rt") as in_stream:
for line in in_stream:
data = json.loads(line)
data.pop("metadata", None)
data["source"] = (
("s2ag" if "dataset=s2ag" in src.as_str else "s2orc") +
'/' +
("train" if "split=train" in src.as_str else "valid")
)
data["version"] = version
out_stream.write(json.dumps(data) + "\n")
docs_cnt += 1
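                    # every 10k docs, push the local count to the shared
                    # progress queue and reset it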
if pbar_queue is not None and docs_cnt % 10000 == 0:
pbar_queue.put((0, docs_cnt))
docs_cnt = 0
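            # one input shard fully processed: bump the files progress bar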
if pbar_queue is not None:
pbar_queue.put((1, 0))
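    # flush whatever doc count is left over from the last shard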
if pbar_queue is not None:
pbar_queue.put((0, docs_cnt))


def threaded_progressbar(q: Queue, timeout: float, total_files: Optional[int] = None):
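    """
    Drive the file- and doc-level progress bars from (files, docs) tuples
    pushed onto `q` by the workers; a `None` item terminates the loop.
    """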
with ExitStack() as stack:
files_pbar = stack.enter_context(
tqdm(desc=" Files", unit="files", position=0, total=total_files)
)
docs_pbar = stack.enter_context(
tqdm(desc=" Docs", unit=" docs", position=1, unit_scale=True)
)
while True:
item = q.get()
if item is None:
break
else:
files, docs = item
files_pbar.update(files)
docs_pbar.update(docs)
sleep(timeout)


def main():
ap = argparse.ArgumentParser()
ap.add_argument("src", type=str, help="Source path")
ap.add_argument("dst", type=str, help="Destination path")
ap.add_argument("--debug", default=False, help="Debug mode", action="store_true")
ap.add_argument(
"--parallel", type=int, default=cpu_count(), help="Number of parallel processes"
)
ap.add_argument(
"-v", "--version", type=str, required=True, help="Version of the dataset"
)
opts = ap.parse_args()
src = MultiPath.parse(opts.src)
dst = MultiPath.parse(opts.dst)
    # group source files by their parent directory (catching any files in the
    # top-level directory as well); each group becomes one output file
grouped_src_paths = {}
for single_src in recursively_list_files(src):
single_src_dir, _ = single_src.rsplit("/", 1)
grouped_src_paths.setdefault(single_src_dir, []).append(single_src)
src_paths: List[List[MultiPath]] = []
dst_paths: List[MultiPath] = []
for dir_name, dir_values in grouped_src_paths.items():
src_paths.append([MultiPath.parse(p) for p in dir_values])
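        # map "dataset=<d>/split=<s>/part_id=<n>" shard directories to a
        # "<d>.<s>.<n>.gz" file under dst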
dir_path = MultiPath.parse(dir_name.replace("part_id=", "") + ".gz")
dst_path = dst / (diff) if len(diff := (dir_path - src)) > 0 else dst
dst_path.path = re.sub(r'dataset=(\w+)/', '\\1.', dst_path.path)
dst_path.path = re.sub(r'split=(\w+)/', '\\1.', dst_path.path)
dst_paths.append(dst_path)
if opts.debug:
with tqdm(total=len(src_paths)) as pbar:
for single_src, single_dst in zip(src_paths, dst_paths):
process_single((single_src, single_dst), version=opts.version)
pbar.update(1)
else:
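        # parallel path: process shard groups in a pool of spawned workers and
        # report progress through a manager-backed queue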
set_start_method("spawn")
with Pool(processes=opts.parallel) as pool:
pbar_queue: Queue = (manager := Manager()).Queue()
pbar_thread = Thread(
target=threaded_progressbar,
args=(pbar_queue, 0.1, sum(len(p) for p in src_paths)),
daemon=True,
)
pbar_thread.start()
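        # exhaust the iterator so every shard group gets processed; results
        # are ignored since workers write output directly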
for _ in pool.imap_unordered(
partial(process_single, pbar_queue=pbar_queue, version=opts.version),
tuple(zip(src_paths, dst_paths)),
):
...
pool.close()
pool.join()
pbar_queue.put(None)
pbar_thread.join()
manager.shutdown()


if __name__ == "__main__":
main()