from concurrent.futures import ProcessPoolExecutor
from multiprocessing import Manager
import traceback
import json
from common.configs.path import paths
from pathlib import Path
from collections import Counter


# Per-process accumulators: each worker process gets its own copy of these
# Counters (inherited at fork / re-created on import), so there is no
# cross-process sharing here. Only tc_counter is ever returned to the parent.
title_counter = Counter()
content_counter = Counter()
tc_counter = Counter()  # title + content

WORKER_POOL_SIZE = 40  # you should set this as the number of your processes
QUEUE_SIZE = 100       # 10 times to your pool size is good enough


def save_dict(path, dt):
    """Serialize *dt* to *path* as pretty-printed JSON.

    Args:
        path: destination file path (str or Path).
        dt: JSON-serializable mapping to write.
    """
    # Explicit encoding: without it the platform default is used, which can
    # corrupt non-ASCII tokens on some systems.
    with open(path, 'w', encoding='utf-8') as f:
        json.dump(dt, f, indent=4)
    print(path, 'saved.')


def main():
    """Count title+content tokens across the corpus in parallel and save totals.

    Spawns WORKER_POOL_SIZE worker processes that consume lines from a managed
    queue, feeds the input file through the queue, then sums each worker's
    partial Counter into a grand total written to tc_count.json.
    """
    tc_counter_all = Counter()

    with Manager() as manager:
        q = manager.Queue(QUEUE_SIZE)

        # Context manager guarantees the pool is shut down and its processes
        # reaped even if aggregation raises (the original leaked the executor).
        with ProcessPoolExecutor(max_workers=WORKER_POOL_SIZE) as executor:
            workers_pool = [executor.submit(worker, i, q)
                            for i in range(WORKER_POOL_SIZE)]

            # Runs in this process; blocks until every input line is enqueued
            # and the termination sentinel has been pushed.
            run_producer(q)

            # Merge each worker's partial count; a crashed worker is logged
            # (best-effort) without aborting the remaining merges.
            for f in workers_pool:
                try:
                    tc_counter_all += f.result()
                except Exception:
                    traceback.print_exc()

    save_dict(paths['output'] / 'log' / 'tc_count.json', dict(tc_counter_all))


def run_producer(q):
    """Stream every line of the pre-train data file into queue *q*.

    Always enqueues a single ``None`` sentinel at the end — even if reading
    fails — so workers can terminate. Workers re-enqueue the sentinel after
    consuming it (a relay), so one sentinel suffices for the whole pool.
    """
    try:
        # Explicit encoding so line decoding does not depend on the platform
        # default locale.
        with open(paths['pre_train_data'], encoding='utf-8') as fp:
            for line in fp:
                q.put(line)
    except Exception:
        # Best-effort: log and still shut the pool down via the sentinel.
        traceback.print_exc()
    finally:
        q.put(None)


def worker(i, q):
    """Consume JSON lines from *q* and count whitespace-split tokens.

    Args:
        i: worker index (used only for the shutdown log message).
        q: queue of JSON-document lines, terminated by a ``None`` sentinel.

    Returns:
        Counter of tokens over title + content for the lines this worker saw.

    Fixes vs. original:
    - Counts into *local* Counters instead of module globals. With
      ProcessPoolExecutor nothing guarantees one task per process; two worker
      tasks sharing a process would share (and double-count) the globals.
    - Uses ``Counter.update(iterable)`` instead of ``counter += Counter(...)``,
      which rebuilt the entire counter on every line (quadratic over the run).
    """
    title_counter = Counter()
    content_counter = Counter()
    tc_counter = Counter()  # title + content
    while 1:
        line = q.get()
        if line is None:
            print(f'worker {i} is done')
            # Relay the sentinel so the next worker also terminates.
            q.put(None)
            return tc_counter
        text = json.loads(line.strip())
        title_list = text['title'].split(' ')
        content_list = text['content'].split(' ')

        title_counter.update(title_list)
        content_counter.update(content_list)
        tc_counter.update(title_list)
        tc_counter.update(content_list)


# Entry-point guard: required for multiprocessing (child processes re-import
# this module and must not re-run main()).
if __name__ == '__main__':
    main()