# -*- coding: utf-8 -*-
"""
@Env 
@Time 2024/9/3 下午4:10
@Author yzpang
@Function: 模型加载与卸载
"""
import abc
import os

import torch
import pandas as pd
from modelserver.configs.server_config import MULTI_CLASS
from modelserver.configs.model_config import DEFAULT_DEVICE
from ..inference.transformers_inference import text_classification


class BaseModelWorker(abc.ABC):
    """Abstract interface for a model worker: load/unload a model and serve
    inference commands.

    NOTE(review): the original declared @abc.abstractmethod without
    inheriting abc.ABC, so the abstract methods were never enforced —
    subclasses missing an override instantiated silently. Inheriting ABC
    makes the contract real.
    """

    @abc.abstractmethod
    def load_model(self):
        """Load the model into memory."""
        raise NotImplementedError

    @abc.abstractmethod
    def unload_model(self):
        """Release the model and any associated resources."""
        raise NotImplementedError

    @abc.abstractmethod
    def handle_message(self, message):
        """Handle a received command string.

        :param message: command text, e.g. ``"INFER hello,world"``
        :return: implementation-defined inference result
        """
        raise NotImplementedError


class PaddleModelWorker(BaseModelWorker):
    """Model load/unload worker backed by the PaddleNLP Taskflow pipeline."""

    def __init__(self, model_name, model_path, task='text_classification', problem_type=MULTI_CLASS):
        """
        :param model_name: display name of the model
        :param model_path: local model directory (passed to Taskflow as task_path)
        :param task: PaddleNLP Taskflow task name
        :param problem_type: classification problem type (default: multi-class)
        """
        self.tokenizer = None
        self.model = None
        self.cls = None          # Taskflow pipeline, created by load_model()
        self.task = task
        self.model_name = model_name
        self.model_path = model_path
        self.problem_type = problem_type

    def load_model(self):
        """Build the Taskflow pipeline from the local model directory."""
        # Imported lazily so the module is importable without paddlenlp installed.
        from paddlenlp import Taskflow

        self.cls = Taskflow(self.task, problem_type=self.problem_type,
                            task_path=self.model_path, is_static_model=False)

    def unload_model(self):
        """Drop the pipeline reference so it can be garbage-collected."""
        self.cls = None

    def handle_message(self, message):
        """
        Handle a received command string.

        :param message: e.g. ``"INFER hello,world"``
        :return: inference result for INFER commands, None for anything else
        :raises RuntimeError: if an INFER command arrives before load_model()
        """
        if message.startswith("INFER"):
            # The original code referenced `paddle_inference` without any
            # import, so every INFER raised NameError. Import path assumed to
            # mirror transformers_inference -- TODO confirm module location.
            from ..inference import paddle_inference
            if self.cls is None:
                raise RuntimeError("model not loaded: call load_model() before INFER")
            # Payload starts after the 6-character "INFER " prefix.
            return paddle_inference.inference_text(self.cls, self.task, message[6:])
        # Unknown commands are ignored.
        return None


class TransformersModelWorker(BaseModelWorker):
    """Model load/unload worker backed by HuggingFace Transformers
    (BERT sequence classification).
    todo pending verification
    """

    def __init__(self, model_name, model_path, device=DEFAULT_DEVICE):
        """
        :param model_name: display name of the model
        :param model_path: directory containing the model, tokenizer and label.txt
        :param device: torch device the model is moved to on load
        """
        self.tokenizer = None
        self.model = None
        self.model_name = model_name
        self.model_path = model_path
        self.device = device
        self.labels = None       # label list, populated by load_model()

    def load_model(self):
        """Load model, tokenizer and label list from self.model_path."""
        from transformers import BertTokenizer, BertForSequenceClassification
        self.model = BertForSequenceClassification.from_pretrained(self.model_path)
        self.model.to(self.device)
        # Inference-only worker: the original left the model in train mode,
        # so dropout stayed active and scores were nondeterministic.
        self.model.eval()
        self.tokenizer = BertTokenizer.from_pretrained(self.model_path)
        # label.txt is assumed to hold one label per line; only the first
        # column is used -- TODO confirm file format against training code.
        self.labels = pd.read_table(os.path.join(self.model_path, 'label.txt'),
                                    header=None).iloc[:, 0].tolist()

    def unload_model(self):
        """Release model/tokenizer references and free cached GPU memory."""
        self.model = None
        self.tokenizer = None
        self.labels = None
        if torch.cuda.is_available():
            torch.cuda.empty_cache()

    def handle_message(self, message):
        """
        Run text classification on the received message.

        :param message: raw text to classify
        :return: (top_scores, top_label_names)
        :raises RuntimeError: if called before load_model()
        """
        if self.model is None or self.tokenizer is None:
            raise RuntimeError("model not loaded: call load_model() first")
        print(f"handle_message: {message}")
        top_scores, top_indices = text_classification(self.model, self.tokenizer, message)
        print(top_scores, top_indices)
        return top_scores, [self.labels[i] for i in top_indices]

