import os
import threading
from threading import Condition
import json
import pathlib
from typing import List, Callable

import tqdm

from backend.experiment.framework.task import Task, TaskHandler
from backend.experiment.framework.log import Log


class ExecuteThreadExitTask(Task):
    """Sentinel task: its presence in the queue tells worker threads to stop.

    Workers detect it with ``isinstance`` and deliberately leave it in the
    queue so that a single sentinel shuts down every thread.
    """

    # Reserved id used only by this control task, never by real work.
    _EXIT_TASK_ID = -1

    def __init__(self) -> None:
        super().__init__(ExecuteThreadExitTask._EXIT_TASK_ID)

class ExecuteThread(threading.Thread):
    """Daemon worker thread that claims batches of tasks from a shared queue,
    runs them through its own ``TaskHandler`` and appends each result as one
    JSON line to a shared result file.

    Coordination: ``task_cond`` guards the shared ``task_queue``;
    ``result_cond`` guards the shared result file and progress bar. Per-run
    parameters (result file, batch size, ...) are injected through
    ``initialize_execution`` before the queue is populated and notified.
    """

    def __init__(
            self,
            task_handler: TaskHandler,
            task_queue: list,
            task_cond: Condition,
            result_cond: Condition,
            index: int
    ):
        # daemon=True so a lingering worker never blocks interpreter exit.
        super().__init__(daemon=True)
        # Shared with the Executor and all sibling workers: pending tasks.
        self.__task_queue = task_queue
        # Per-run parameters; None until initialize_execution() is called.
        self.__verbose = None
        self.__result_file = None
        self.__batch_size = None
        self.__progress_bar = None
        self.__task_handler = task_handler
        # Stable worker index, used only in log messages.
        self.__index = index
        # Condition protecting __task_queue.
        self.__task_cond = task_cond
        # Condition protecting __result_file / __progress_bar.
        self.__result_cond = result_cond

    def initialize_execution(
            self, result_file,
            verbose: bool,
            batch_size: int,
            progress_bar: tqdm.tqdm
    ):
        """Inject the parameters for one execution run.

        Must be called before tasks are enqueued and the workers notified;
        ``run`` reads these attributes without further synchronization.
        """
        self.__result_file = result_file
        self.__verbose = verbose
        self.__batch_size = batch_size
        self.__progress_bar = progress_bar

    def finalize_execution(self):
        """Clear the per-run parameters after a run completes."""
        self.__result_file = None
        self.__verbose = False
        self.__batch_size = None
        self.__progress_bar = None

    def run(self) -> None:
        # NOTE(review): __verbose is still None when the thread is started
        # (Executor starts workers before initialize_execution), so this
        # startup message is effectively never logged — confirm intent.
        if self.__verbose:
            Log(f'ExecutorThread-{self.__index}-{self.name} started')
        while True:
            with self.__task_cond:
                # Sleep until the Executor enqueues work (or the exit task).
                while len(self.__task_queue) == 0:
                    self.__task_cond.wait()
                batch_tasks = []
                exit_signal = False
                # Claim up to __batch_size tasks while holding the lock.
                while len(batch_tasks) < self.__batch_size and len(
                        self.__task_queue) > 0:
                    task: Task = self.__task_queue[0]
                    if isinstance(task, ExecuteThreadExitTask):
                        # Deliberately NOT popped: the sentinel stays at the
                        # head of the queue so every sibling worker sees it.
                        exit_signal = True
                        Log(f'ExecutorThread-{self.__index}-{self.name}'
                            f' received exit signal', -1)
                        self.__task_cond.notify_all()
                        break
                    self.__task_queue.pop(0)
                    batch_tasks.append(task)
            # Handle the claimed batch outside the queue lock so other
            # workers can claim tasks concurrently.
            for task in batch_tasks:
                result = self.__task_handler.handle(task)
                with self.__result_cond:
                    # One JSON object per line, flushed immediately, so an
                    # interrupted run can be resumed from the file.
                    self.__result_file.write(json.dumps({
                        'id': task.task_id,
                        'result': result
                    }, ensure_ascii=False) + '\n')
                    self.__result_file.flush()
                    self.__progress_bar.update(1)
                    # Wake Executor.execute(), which waits until the
                    # progress bar reaches its total.
                    self.__result_cond.notify_all()
            if exit_signal:
                break
        Log(f'ExecutorThread-{self.__index}-{self.name}'
            f' received exiting', -1)


class Executor:
    """Fans ``Task`` objects out to a fixed pool of ``ExecuteThread`` workers
    and persists results as JSON lines.

    Results are appended to a file so interrupted runs can be resumed:
    ``execute`` skips any task whose id already appears in the result file.
    """

    def __init__(
            self,
            execute_threads: int,
            task_handler_builder: Callable[[], TaskHandler]
    ):
        """Spawn ``execute_threads`` daemon workers, each with its own
        ``TaskHandler`` produced by ``task_handler_builder``.
        """
        self.__task_queue: List[Task] = []
        # Guards __task_queue; workers wait on it for new work.
        self.__task_cond = Condition()
        # Guards the result file / progress bar; execute() waits on it
        # until every result has been written.
        self.__result_cond = Condition()
        self.__threads = [
            ExecuteThread(
                task_handler=task_handler_builder(),
                task_queue=self.__task_queue,
                task_cond=self.__task_cond,
                result_cond=self.__result_cond,
                index=i
            ) for i in range(execute_threads)
        ]
        for each in self.__threads:
            each.start()

    def execute(
            self,
            result_file_path: str,
            tasks: List[Task],
            verbose: bool = True,
            batch_size: int = 1
    ):
        """Run ``tasks`` on the worker pool, appending one JSON line per
        result to ``result_file_path``; blocks until all tasks finish.

        Tasks whose id already appears in the result file (from an earlier
        run) are skipped, which makes interrupted runs resumable.

        :param result_file_path: JSON-lines result file, created if missing.
        :param tasks: tasks to run; ``None`` entries are ignored.
        :param verbose: enable logging and the progress bar.
        :param batch_size: number of tasks a worker claims per wakeup.
        """
        # touch(exist_ok=True) is atomic; the previous os.path.exists() +
        # touch() pair had a check-then-create race.
        pathlib.Path(result_file_path).touch(exist_ok=True)

        exclude_task_id = set()
        with open(result_file_path, 'r', encoding='utf8') as file:
            # Stream line by line instead of readlines() so a large result
            # file is not loaded into memory at once.
            for line in file:
                exclude_task_id.add(json.loads(line)['id'])
        if verbose:
            Log(f'completed: {len(exclude_task_id)}')
        with self.__task_cond:
            assert len(self.__task_queue) == 0
            for each in tasks:
                if each is not None and each.task_id not in exclude_task_id:
                    self.__task_queue.append(each)
        if verbose:
            Log(f'tasks execution begins, total: {len(self.__task_queue)}')
        with tqdm.tqdm(total=len(self.__task_queue), disable=not verbose) as \
                progress_bar:
            with open(result_file_path, 'a', encoding='utf8') as file:
                for each in self.__threads:
                    each.initialize_execution(
                        result_file=file,
                        verbose=verbose,
                        batch_size=batch_size,
                        progress_bar=progress_bar
                    )
                # Wake the workers only after every thread has received the
                # output file and batch size.
                with self.__task_cond:
                    self.__task_cond.notify_all()
                # NOTE(review): if a TaskHandler raises, its worker dies and
                # the bar never reaches total, so this wait would hang —
                # confirm handlers are expected never to raise.
                with self.__result_cond:
                    while progress_bar.n < progress_bar.total:
                        self.__result_cond.wait()
                assert len(self.__task_queue) == 0
                for each in self.__threads:
                    each.finalize_execution()
        if verbose:
            Log('Execution completed')

    def __del__(self):
        """Ask every worker to exit, then join them.

        A single ``ExecuteThreadExitTask`` is enqueued; workers leave the
        sentinel in the queue when they see it, so one sentinel stops all
        threads.
        """
        with self.__task_cond:
            assert len(self.__task_queue) == 0
            for each in self.__threads:
                # Give workers harmless run parameters so run() can evaluate
                # its batch logic while shutting down.
                each.initialize_execution('', False, 1, None)
            self.__task_queue.append(ExecuteThreadExitTask())
            self.__task_cond.notify_all()
            # Fixed garbled wording ('notify worker thread to join joined').
            Log('Executor notified worker threads to exit', -1)
        for index, each in enumerate(self.__threads):
            # Log after join() returns so the message is accurate; the
            # original logged 'joined' before the join actually happened.
            each.join()
            Log(f'ExecuteThread-{index} joined', -1)
