import os
import torch
from ..config import hyper
from ..models import Seq2umt
from ..preprocess.DataPreProcessor import DataPreProcessor
from tqdm import tqdm
from typing import List, Dict

def BackgroundGenerator(loader):
    """Identity pass-through used in place of ``prefetch_generator.BackgroundGenerator``.

    Keeps the evaluation loop's call site unchanged while avoiding the
    third-party dependency; simply returns ``loader`` untouched.
    (Replaces the PEP 8 E731 ``lambda`` assignment with a named ``def``.)
    """
    return loader


class Predictor:
    """Load a trained Seq2umt checkpoint and run triple-extraction inference.

    Typical fluent usage::

        result = Predictor().putinData(...).predict().get_result()

    Results accumulate in ``self.result_list`` across ``predict()`` calls.
    """

    def __init__(self, best_model: str = None):
        """
        :param best_model: Path to a saved ``state_dict``. When ``None``,
            falls back to the bundled ``best_model_sequmt/nyt_seq2umt_ops_best``
            checkpoint resolved relative to the package root (two directories
            above this file).
        """
        if best_model is None:
            package_root = os.path.dirname(
                os.path.dirname(os.path.abspath(__file__))
            )
            # os.path.join instead of "/".format() so the path is also
            # correct on Windows.
            self.best_model = os.path.join(
                package_root, "best_model_sequmt", "nyt_seq2umt_ops_best"
            )
        else:
            self.best_model = best_model
        self.device = torch.device(hyper.device)
        print(self.device)
        self.model = None  # built lazily by _init_model()
        # Parallel lists: result_list["text"][i] pairs with
        # result_list["predict_triple"][i]; extended by _evaluation().
        self.result_list = {"text": [], "predict_triple": []}
        self.preprocessor = None  # set by putinData()

    def _init_model(self):
        """Instantiate the model named by ``hyper.model`` and move it to the device."""
        name = hyper.model
        registry = {
            "seq2umt": Seq2umt,
        }
        self.model = registry[name](hyper).to(self.device)

    def _load_model(self):
        """Load the trained parameters from ``self.best_model`` onto the device."""
        self.model.load_state_dict(
            torch.load(self.best_model, map_location=self.device)
        )

    def putinData(self, sentence_list: List = None, use_dataset_file=True, dataset_file=None):
        """Register the input data and build the preprocessor.

        :param sentence_list: Raw sentences to predict on (may be ``None``
            when reading from a dataset file instead).
        :param use_dataset_file: When True, read input from ``dataset_file``.
        :param dataset_file: Path to the dataset file; ignored when it does
            not exist or ``use_dataset_file`` is False.
        :return: ``self`` (fluent interface).
        """
        # Guard dataset_file against None before os.path.exists: the original
        # raised TypeError when use_dataset_file=True (the default) and no
        # dataset_file was supplied.
        usable_file = (
            dataset_file
            if use_dataset_file
            and dataset_file is not None
            and os.path.exists(dataset_file)
            else None
        )
        self.preprocessor = DataPreProcessor(
            raw_data_list=sentence_list,
            use_dataset_file=use_dataset_file,
            dataset_file=usable_file,
        )
        return self

    def predict(self):
        """Run inference over the data registered via ``putinData``.

        :return: ``self`` (fluent interface); results are in ``result_list``.
        :raises AssertionError: if ``putinData`` was not called first.
        """
        print("------predicter  predict -----")
        assert self.preprocessor is not None
        hyper.vocab_init()
        self._init_model()  # build the model architecture
        self._load_model()  # load trained parameters from self.best_model
        loader = self.preprocessor.process().get_data_loader()
        self._evaluation(loader)
        return self

    def _evaluation(self, loader):
        """Iterate the loader in eval mode and accumulate decoded triples."""
        self.model.eval()
        process_bar = tqdm(
            enumerate(BackgroundGenerator(loader)), total=len(loader)
        )
        with torch.no_grad():
            for batch_ndx, sample in process_bar:
                output = self.model(sample, is_train=False)
                self.result_list["text"].extend(output["text"])
                self.result_list["predict_triple"].extend(output["decode_result"])

    def get_result(self) -> Dict:
        """Return the accumulated {"text": [...], "predict_triple": [...]} results."""
        return self.result_list
