import json
import re
from os.path import join, abspath
from typing import List, Union

from backend.experiment.framework.shots_prompt.implement import \
    CommonPerplexityClassificationImplement, \
    CommonShotsGenerator, \
    CommonPromptClassificationSample
from backend.experiment.framework.shots_prompt.task import \
    PerplexityPromptShotsClassificationTaskMaker
from backend.experiment.framework.task import TaskMaker
from backend.experiment.model import context_limit
from backend.experiment.tokenizer import tokenizer


class IFlyTekImplement(CommonPerplexityClassificationImplement):
    """IFlyTek app-description classification dataset implement.

    Reads the JSONL-formatted train/dev/test splits plus ``labels.json``
    from :meth:`data_dir` and turns each record into a perplexity-based
    classification prompt of the form ``句子：“...”的分类是<label>。``,
    where the label slot is either the gold label description or the
    mask token.
    """

    __mask_token = '[MASK]'
    # Label descriptions in internal-index order; index i is the internal
    # label id used by the framework.
    __label_map = []
    # original integer label (from labels.json) -> label description
    __label_dict = {}
    # original integer label -> index into __label_map
    __original_label2label_map_index = {}

    @classmethod
    def label_map(cls) -> List[str]:
        """Return the label descriptions, indexed by internal label id.

        Only populated after :meth:`_read_train_records` has run.
        """
        return cls.__label_map

    @classmethod
    def _get_record_label(cls, record) -> int:
        """Map a record's original integer label to its internal index.

        Raises ``KeyError`` if the label maps have not been built yet
        (i.e. before :meth:`_read_train_records` was called).
        """
        return cls.__original_label2label_map_index[int(record['label'])]

    @classmethod
    def _make_test_sample(cls, record, made_sample_num: int) -> \
            Union[CommonPromptClassificationSample,
                  List[CommonPromptClassificationSample]]:
        """Build a masked-prompt classification sample for *record*.

        NOTE: the original annotation used ``X or List[X]``, which Python
        evaluates to just ``X``; fixed to an explicit ``Union``.
        """
        prompt = cls._make_prompt(record, True)
        return CommonPromptClassificationSample(
            prompt=prompt,
            target_mask_token=cls.__mask_token,
            sample_id=made_sample_num
        )

    @classmethod
    def __read_json_lines(cls, filename: str) -> list:
        """Parse one JSON object per line from *filename* under data_dir()."""
        with open(join(cls.data_dir(), filename), 'r', encoding='utf8') \
                as file:
            return [json.loads(line) for line in file]

    @classmethod
    def _read_train_records(cls) -> list:
        """Read ``train.json`` and (re)build the label maps.

        The label structures are cleared before being repopulated so that
        calling this method more than once does not accumulate duplicate
        entries (the previous implementation only reset ``__label_dict``,
        so a second call doubled ``__label_map``).  The list/dict objects
        for the map and index are cleared in place so references obtained
        earlier (e.g. via :meth:`label_map`) stay valid.
        """
        records = cls.__read_json_lines('train.json')
        cls.__label_dict = {}
        cls.__label_map.clear()
        cls.__original_label2label_map_index.clear()
        with open(join(cls.data_dir(), 'labels.json'), 'r', encoding='utf8') \
                as file:
            for line in file:
                record = json.loads(line)
                cls.__label_dict[int(record['label'])] = record['label_des']
        # Internal indices follow labels.json file order (dict preserves
        # insertion order).
        for key in cls.__label_dict:
            cls.__original_label2label_map_index[key] = len(cls.__label_map)
            cls.__label_map.append(cls.__label_dict[key])
        return records

    @classmethod
    def _read_dev_records(cls) -> list:
        """Read the dev split (``dev.json``) as a list of dicts."""
        return cls.__read_json_lines('dev.json')

    @classmethod
    def _read_test_records(cls) -> list:
        """Read the test split (``test.json``) as a list of dicts."""
        return cls.__read_json_lines('test.json')

    @classmethod
    def _make_prompt(cls, record, mask_target: bool) -> Union[str, List[str]]:
        """Render the classification prompt for *record*.

        :param mask_target: if True the label slot holds the mask token
            (test-time form); otherwise the record's gold ``label_des``.
        """
        sentence = cls.__clean_text(record['sentence'])
        label = cls.__mask_token if mask_target else record['label_des']
        return f'句子：“{sentence}”的分类是{label}。'

    def get_task_maker(self) -> TaskMaker:
        """Assemble the shots-based perplexity classification task maker."""
        return PerplexityPromptShotsClassificationTaskMaker(
            token_limit=context_limit,
            tokenizer=tokenizer,
            shots_generator=CommonShotsGenerator(
                length_sorted_example_tokens=self.get_sorted_example_tokens()
            ),
            label_map=self.label_map()
        )

    @classmethod
    def data_dir(cls) -> str:
        """Directory holding the dataset files: parent of this module's dir."""
        return abspath(join(__file__, '../..'))

    @classmethod
    def cache_dir(cls) -> str:
        """Cache directory: ``cache`` inside :meth:`work_dir`."""
        return join(cls.work_dir(), 'cache')

    @classmethod
    def work_dir(cls) -> str:
        """Working directory: the directory containing this module."""
        return abspath(join(__file__, '..'))

    @staticmethod
    def __clean_text(text: str) -> str:
        # Strip markup residue: 'br/' fragments and '&ldquo'/'&rdquo'
        # entity stems.  NOTE(review): the pattern does not consume a
        # trailing ';' of a full entity — presumably the source data
        # lacks it; confirm against the raw corpus.
        return re.sub(r'(br/)|(&([lr])dquo)', '', text)
