import abc
import os
from os.path import join, exists
from threading import Lock
from typing import List
from backend.experiment.framework.data import Sample, PromptSample
from backend.experiment.framework.task import TaskMaker, Task, TaskHandler, \
    PromptPerplexityTaskMaker, \
    PromptPerplexityTaskHandler
from backend.experiment.framework.tokenizer import BaseTokenizer
from backend.experiment.framework.log import Log
from backend.experiment.model import context_limit


class BaseImplement(metaclass=abc.ABCMeta):
    """Reference-counted base class for experiment implementations.

    Each concrete subclass gets one-time setup/teardown: ``_initialize``
    runs when the first instance of that class is constructed and
    ``_finalize`` runs when its last instance is garbage-collected.

    Note on the counter: ``__refs`` is name-mangled to
    ``_BaseImplement__refs``.  Inside ``__new_instance``, ``cls.__refs += 1``
    reads the value inherited from this base class the first time and then
    writes an attribute onto ``cls`` itself — so every concrete class ends
    up with its own counter while sharing a single ``__refs_lock``.
    """

    # Per-concrete-class instance count (see class docstring).
    __refs = 0
    # Single lock for the whole hierarchy; guards every counter update.
    __refs_lock = Lock()

    def __init__(self, *args, **kwargs):
        # All construction arguments are forwarded to the one-time
        # class initializer (only used when this is the first instance).
        self.__new_instance(*args, **kwargs)

    def __del__(self):
        self.__del_instance()

    @abc.abstractmethod
    def get_task_maker(self) -> TaskMaker:
        """Return the maker used to build this implement's tasks."""
        pass

    @abc.abstractmethod
    def get_tasks(self, *args, **kwargs) -> List[Task]:
        """Build (or load from cache) the list of tasks to run."""
        pass

    @classmethod
    def work_dir(cls) -> str:
        """Return this implement's working directory."""
        raise NotImplementedError

    @classmethod
    def tokenizer(cls) -> BaseTokenizer:
        """Return the tokenizer this implement uses."""
        raise NotImplementedError

    @classmethod
    def _initialize(cls, *args, **kwargs):
        """One-time class setup; runs before the first instance exists."""
        raise NotImplementedError

    @classmethod
    def _finalize(cls):
        """One-time class teardown; runs after the last instance dies."""
        raise NotImplementedError

    @classmethod
    def get_task_handler(cls) -> TaskHandler:
        """Return the handler that executes this implement's tasks."""
        raise NotImplementedError

    @classmethod
    def __new_instance(cls, *args, **kwargs):
        with cls.__refs_lock:
            if cls.__refs == 0:
                # If _initialize raises, the count stays untouched so a
                # later construction attempt retries initialization.
                cls._initialize(*args, **kwargs)
            cls.__refs += 1

    @classmethod
    def __del_instance(cls):
        with cls.__refs_lock:
            if cls.__refs <= 0:
                # __del__ runs even when __init__ raised before the count
                # was incremented; bail out rather than driving the count
                # negative, which would permanently block both
                # re-initialization and finalization for this class.
                return
            cls.__refs -= 1
            if cls.__refs == 0:
                cls._finalize()


class ImplementKnowsTestSamples(BaseImplement, metaclass=abc.ABCMeta):
    """Interface for implementations that can enumerate their test samples."""

    @classmethod
    def get_test_samples(cls) -> List[Sample]:
        """Return the samples this implement is evaluated on."""
        raise NotImplementedError


class PromptPerplexityImplement(
    ImplementKnowsTestSamples,
    metaclass=abc.ABCMeta
):
    """Implement that evaluates prompt perplexity over a fixed prompt list.

    ``_initialize`` converts ``get_test_sample_prompt()`` into
    ``PromptSample`` objects once per concrete class.  Tasks are cached on
    disk as JSON so repeated runs skip the (multiprocess) task-making step.
    """

    # Set by _initialize; read back through tokenizer().  None until the
    # first instance of a concrete subclass is constructed.
    __tokenizer = None
    # Per-class samples.  _initialize REBINDS this attribute on the
    # concrete class (rather than mutating this shared default list) so
    # sibling subclasses cannot clobber each other's samples.
    __test_samples: List[Sample] = []

    def __init__(
            self,
            tokenizer: BaseTokenizer,
            make_task_processes: int = 16,
            make_task_batch: int = 128,
            tokenize_processes: int = 16,
            tokenize_batch: int = 128
    ):
        """Forward construction arguments to the one-time class
        initializer (see BaseImplement).  Only ``tokenizer`` is consumed
        by ``_initialize`` today; the remaining knobs are accepted so
        subclasses may use them.
        """
        super().__init__(
            tokenizer=tokenizer,
            make_task_processes=make_task_processes,
            make_task_batch=make_task_batch,
            tokenize_processes=tokenize_processes,
            tokenize_batch=tokenize_batch
        )

    @classmethod
    def test_dev(cls) -> bool:
        """Whether this implement targets a dev split; off by default."""
        return False

    def get_task_maker(self) -> PromptPerplexityTaskMaker:
        """Build a task maker bound to the model's context limit and the
        class-level tokenizer."""
        return PromptPerplexityTaskMaker(
            token_limit=context_limit,
            tokenizer=self.tokenizer()
        )

    def get_tasks(
            self, insert_start_token: bool,
            cache_filename: str,
            make_task_processes: int = 16,
            make_task_batch: int = 128
    ) -> List[Task]:
        """Return the tasks for all test samples, cached on disk as JSON.

        :param insert_start_token: forwarded to the task maker.
        :param cache_filename: cache file name inside ``cache_dir()``;
            a ``.json`` suffix is appended when missing.
        :param make_task_processes: worker processes for task making
            (only used on a cache miss).
        :param make_task_batch: multiprocessing batch size.
        :raises ValueError: if ``cache_filename`` is empty.
        """
        Log(f'getting task, log_level: {Log.log_level()}')
        if not cache_filename:
            # A real exception instead of `assert`, which vanishes
            # under `python -O`.
            raise ValueError('cache_filename must be non-empty')
        if not cache_filename.endswith('.json'):
            cache_filename += '.json'
        # exist_ok avoids the check-then-create race when several
        # processes start at the same time.
        os.makedirs(self.cache_dir(), exist_ok=True)
        cache_path = join(self.cache_dir(), cache_filename)
        task_maker = self.get_task_maker()
        if exists(cache_path):
            tasks = task_maker.load_tasks(cache_path)
        else:
            tasks = task_maker.make_tasks(
                samples=self.get_test_samples(),
                insert_start_token=insert_start_token,
                multiprocessing=make_task_processes,
                verbose=Log.log_level() <= 0,
                multiprocessing_batch=make_task_batch
            )
            task_maker.save_tasks(tasks, cache_path)
        Log('got task')
        return tasks

    @classmethod
    def get_test_samples(cls) -> List[Sample]:
        """Return the PromptSamples materialized by ``_initialize``
        (empty before the first instance exists)."""
        return cls.__test_samples

    @classmethod
    def tokenizer(cls) -> BaseTokenizer:
        """Return the tokenizer supplied at construction time."""
        return cls.__tokenizer

    @classmethod
    def get_task_handler(cls) -> PromptPerplexityTaskHandler:
        """Build a handler bound to the globally loaded model."""
        # Imported lazily so the model is only loaded when tasks run.
        from backend.experiment.model import model
        return PromptPerplexityTaskHandler(model=model)

    @classmethod
    def cache_dir(cls) -> str:
        """Directory holding the JSON task cache; subclass-provided."""
        raise NotImplementedError

    @classmethod
    def get_test_sample_prompt(cls) -> List[str]:
        """Return the raw prompt strings to evaluate; subclass-provided."""
        raise NotImplementedError

    @classmethod
    def _initialize(
            cls,
            tokenizer: BaseTokenizer,
            make_task_processes: int, make_task_batch: int,
            tokenize_processes: int, tokenize_batch: int
    ):
        """One-time setup: store the tokenizer and materialize samples.

        Rebinds ``__test_samples`` on the concrete class instead of
        mutating the shared class-level list in place: with in-place
        ``clear()``/``append``, initializing a second subclass wiped the
        first subclass's samples because both referenced one list.
        """
        cls.__tokenizer = tokenizer
        cls.__test_samples = [
            PromptSample(sample_id=i, prompt=prompt)
            for i, prompt in enumerate(cls.get_test_sample_prompt())
        ]

    @classmethod
    def _finalize(cls):
        """One-time teardown: drop this class's samples."""
        cls.__test_samples = []