import os
import json
import pathlib
from os.path import join
from multiprocessing import Pool

def do_it(orig_dir, to_art_dir, to_abs_dir, files, process_i, process_n):
    """Worker: process every process_n-th file starting at index process_i.

    For each assigned JSON file in orig_dir, joins the 'article' and
    'abstract' sentence lists into single space-separated strings and
    writes them to to_art_dir/<stem> and to_abs_dir/<stem> respectively.

    Args:
        orig_dir: directory containing the source ``*.json`` files.
        to_art_dir: output directory for article texts.
        to_abs_dir: output directory for abstract texts.
        files: full list of file names (shared by all workers).
        process_i: this worker's index (0-based).
        process_n: total number of workers (stride over ``files``).
    """
    total = len(files)
    # Strided partition: worker i handles indices i, i+n, i+2n, ...
    for idx in range(process_i, total, process_n):
        filename = files[idx]  # avoid shadowing the builtin `file` (py2) / keep clarity
        print('processing : {}/{}\r'.format(idx, total), end='')
        stem = pathlib.Path(filename).stem
        # Explicit encoding: the default is locale-dependent and these are UTF-8 JSON files.
        with open(join(orig_dir, filename), 'r', encoding='utf-8') as f:
            data = json.load(f)
        article_text = ' '.join(data['article'])
        abstract_text = ' '.join(data['abstract'])  # renamed: `abs` shadowed the builtin
        with open(join(to_art_dir, stem), 'w', encoding='utf-8') as f:
            f.write(article_text)
        with open(join(to_abs_dir, stem), 'w', encoding='utf-8') as f:
            f.write(abstract_text)

def extract(from_dir, to_art_dir, to_abs_dir, split):
    """Split a dataset partition's JSON files into article/abstract text files.

    Args:
        from_dir: directory holding the author-provided files such as ``0.json``,
            organized in per-split subdirectories.
        to_art_dir: root output directory for article texts.
        to_abs_dir: root output directory for abstract texts.
        split: which partition to process — one of ``'train'``, ``'test'``, ``'val'``.
    """
    orig_dir = join(from_dir, split)
    to_art_dir = join(to_art_dir, split)
    to_abs_dir = join(to_abs_dir, split)

    # Create the output directories up front; without this the workers crash
    # on their first write if the directories don't already exist.
    os.makedirs(to_art_dir, exist_ok=True)
    os.makedirs(to_abs_dir, exist_ok=True)

    # Sort for a deterministic worker partition (os.listdir order is arbitrary).
    files = sorted(os.listdir(orig_dir))
    process_n = 16
    args = [[orig_dir, to_art_dir, to_abs_dir, files, i, process_n] for i in range(process_n)]
    with Pool(process_n) as p:
        p.starmap(do_it, args)



if __name__ == '__main__':
    # Input: author-provided OpenIE-processed CNN/DM JSON files, one per example.
    from_dir = '/data/liuhongfei/z/finished_files_openie_3'
    # Outputs: plain-text articles and abstracts, one file per example.
    to_art_dir = '/data/liuhongfei/z/cnndm_articles'
    to_abs_dir = '/data/liuhongfei/z/cnndm_abstract'

    # Uncomment to process the other splits:
    #extract(from_dir, to_art_dir, to_abs_dir, 'test')
    #extract(from_dir, to_art_dir, to_abs_dir, 'val')
    extract(from_dir, to_art_dir, to_abs_dir, 'train')
