from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Manager
import traceback
import json
from common.configs.path import paths
from pathlib import Path
from collections import Counter

# title_t = ''
# content_t = ''
# tc_t = ''

WORKER_POOL_SIZE = 40  # number of consumer processes; set to the machine's core count
QUEUE_SIZE = 100       # queue capacity; ~10x the pool size is usually enough


def save_file(text_, tag):
    """Append *text_* to ``<data_dir>/<tag>.txt``.

    The file is opened per call, so each concurrent worker process gets
    its own handle. Explicit UTF-8 avoids the platform-default encoding
    (the text comes from JSON, which is UTF-8 by convention).
    """
    with open(paths['data_dir'] / '{}.txt'.format(tag), 'a', encoding='utf-8') as f:
        f.write(text_)


def main():
    """Fan a line-oriented pretrain file out to a pool of worker processes.

    A managed queue is shared between one producer (this process) and
    WORKER_POOL_SIZE consumers. A single ``None`` sentinel is pushed after
    the last line; each worker re-queues it on exit so all workers stop.
    """
    with Manager() as manager:
        q = manager.Queue(QUEUE_SIZE)

        # Context manager guarantees the pool is shut down even if the
        # producer or result collection raises (the executor was
        # previously never shut down).
        with ProcessPoolExecutor(max_workers=WORKER_POOL_SIZE) as executor:
            workers_pool = [executor.submit(worker, i, q)
                            for i in range(WORKER_POOL_SIZE)]

            # Feed every input line into the queue, then the sentinel.
            run_producer(q)

            # Wait for all workers; report (but don't propagate) failures.
            for f in workers_pool:
                try:
                    f.result()
                except Exception:
                    traceback.print_exc()


def run_producer(q):
    """Stream every line of the pretrain data file into *q*.

    The ``None`` sentinel is put in ``finally`` so the workers still shut
    down cleanly even if reading fails part-way through.
    """
    try:
        # Explicit UTF-8: JSON-lines input must not depend on the
        # platform-default encoding.
        with open(paths['pretrain_data'], encoding='utf-8') as fp:
            for line in fp:
                q.put(line)
    except Exception:
        # Best effort: log and fall through to the sentinel.
        traceback.print_exc()
    finally:
        q.put(None)


def worker(i, q):
    """Consume JSON lines from *q* until the ``None`` sentinel is seen.

    Each line is parsed as a JSON object with 'title' and 'content' keys
    (TODO confirm against the pretrain data schema) and appended to the
    corresponding output files. The sentinel is re-queued before returning
    so the remaining workers also terminate.
    """
    while True:
        line = q.get()
        if line is None:
            print(f'worker {i} is done')
            # Propagate the shutdown signal to the next worker.
            q.put(None)
            return
        try:
            record = json.loads(line.strip())
            # NOTE(review): multiple processes append to the same files;
            # ordering between workers is arbitrary — confirm acceptable.
            save_file(record['title'] + '\n\n', 'title')
            save_file(record['content'] + '\n\n', 'content')
        except Exception:
            # A malformed line must not kill the worker: a dead worker
            # never re-queues the sentinel and the others would block
            # forever on q.get().
            traceback.print_exc()


# Entry-point guard: required so child processes spawned by
# multiprocessing do not re-run main() on import.
if __name__ == '__main__':
    main()
