import json
from jieba import lcut
from glob import glob

import numpy as np
import pandas as pd
from tqdm import tqdm
from multiprocessing import Pool


def to_pickle(line_id, data):
    """Tokenize JSON lines with jieba and dump the tokens as chunked pickles.

    Consumes *data* (a list of JSON-line strings) destructively via ``pop``,
    cuts each line's ``"text"`` field with :func:`jieba.lcut`, and writes the
    accumulated ``{index: tokens}`` dict to
    ``F:/skypile_jieba/voc_{line_id}_{index_id}.pkl`` every 30000 entries,
    plus a final flush for the remainder.

    Args:
        line_id: Identifier of the source file, used in output filenames.
        data: List of raw JSON-line strings; emptied in place as a work queue.
    """
    chunk_size = 30000
    index_id = 0
    data_dict = dict()
    bar = tqdm(total=len(data))
    while data:
        bar.update(1)
        line = data.pop()
        try:
            # Best-effort: skip malformed JSON / missing "text" / non-string
            # values, but never swallow SystemExit/KeyboardInterrupt the way
            # a bare `except:` would.
            data_dict[index_id] = lcut(json.loads(line)["text"])
        except Exception:
            continue
        index_id += 1
        if index_id % chunk_size == 0:
            pd.to_pickle(data_dict, "F:/skypile_jieba/voc_{}_{}.pkl".format(line_id, index_id), compression="zip")
            data_dict = dict()
    bar.close()
    # Only flush the remainder: when index_id is an exact multiple of
    # chunk_size, the original unconditional write would clobber the chunk
    # just written at the SAME filename with an empty dict.
    if data_dict:
        pd.to_pickle(data_dict, "F:/skypile_jieba/voc_{}_{}.pkl".format(line_id, index_id), compression="zip")


if __name__ == '__main__':

    # Slice of the input file list handled by this run; previously written
    # as 0+15+10+10+10+10+10 (=65) with a window of 10 files.
    START = 65
    COUNT = 10

    pool = Pool(processes=20)
    p_list = []
    line_id = START
    for one_path in glob("F:/skypile/*")[START:START + COUNT]:
        with open(one_path, "r", encoding="utf-8") as f:
            data = f.readlines()

        line_id += 1
        p = pool.apply_async(func=to_pickle, args=(line_id, data))
        # BUG FIX: was `p_list.append` (bare attribute, never called), so
        # p_list stayed empty and worker exceptions were silently dropped.
        p_list.append(p)
    # .get() re-raises any exception from the worker so failures surface here.
    for p in tqdm(p_list):
        p.get()
    pool.close()
    pool.join()
