from paddlenlp.transformers import ProphetNetTokenizer
import tqdm
import sys
import os
import json

# CLI arguments: input directory (with {train,dev,test}.{src,tgt} files)
# and output directory for the converted JSON-lines files.
fin_dir = sys.argv[1]
fout_dir = sys.argv[2]


def write_json_lines(lines, out_path):
    """Serialize each item of *lines* as one JSON object per line at *out_path*.

    Non-ASCII characters are written verbatim (``ensure_ascii=False``);
    the file is created/truncated and encoded as UTF-8.
    """
    with open(out_path, 'w+', encoding='utf-8') as out:
        out.writelines(
            json.dumps(record, ensure_ascii=False) + '\n' for record in lines
        )


tok = ProphetNetTokenizer.from_pretrained('bert-base-uncased')


def convert_cased2uncased(fin_dir, fout_dir):
    """Re-tokenize the train/dev/test src+tgt pairs in *fin_dir* with the
    uncased wordpiece tokenizer and write them as JSON-lines files
    ({'src': ..., 'tgt': ...}) into *fout_dir*.

    Expects ``{train,dev,test}.src`` and ``{train,dev,test}.tgt`` in
    *fin_dir*; produces ``{train,dev,test}.json`` in *fout_dir*.
    """
    def _tokenize(line):
        # Undo any existing wordpiece merging markers (" ##") so the text is
        # re-tokenized from scratch by the uncased tokenizer.
        plain = line.strip().replace(" ##", "")
        return " ".join(tok.tokenize(plain))

    # makedirs with exist_ok=True also creates missing parent directories and
    # avoids the check-then-create race of exists() + mkdir().
    os.makedirs(fout_dir, exist_ok=True)

    for split in ['train', 'dev', 'test']:
        src_file = os.path.join(fin_dir, '{}.src'.format(split))
        tgt_file = os.path.join(fin_dir, '{}.tgt'.format(split))
        lines = []
        # BUG FIX: the file does `import tqdm`, so the original bare
        # `tqdm(...)` called the *module* and raised TypeError; the progress
        # bar class is tqdm.tqdm. `with` also closes both files (the original
        # leaked the handles returned by open()).
        with open(src_file, encoding='utf-8') as fsrc, \
                open(tgt_file, encoding='utf-8') as ftgt:
            for src, tgt in tqdm.tqdm(zip(fsrc, ftgt)):
                lines.append({'src': _tokenize(src), 'tgt': _tokenize(tgt)})
        write_json_lines(lines, os.path.join(fout_dir, '{}.json'.format(split)))


convert_cased2uncased(fin_dir, fout_dir)
