import os.path
import openai
import ollama
import logging
from config import aiConfig
from instructions.enum_types import InstructionType, ActionType
from tools import action_logger, parse_file_reply
from wechat.reply import Reply, ReplyType

logger = logging.getLogger(__name__)


class Instruction:
    """Base class for a chat instruction.

    Holds the user's query, a prompt template, and the chat DB handler, and
    provides two ways to obtain an answer: a local Ollama model or an
    OpenAI-compatible online endpoint. Subclasses implement handle_query().
    """

    def __init__(self, chatDBHandler=None, query=None):
        # Prompt template; subclasses are expected to set this before
        # calling query_ollama()/query_other().
        self.prompt = None
        # Raw user query text, substituted into the prompt template.
        self.query = query
        # Concrete instruction type, set by subclasses.
        self.insType: InstructionType = None
        # Chat database handler; also used to record action events.
        self.chatDBHandler = chatDBHandler
        # Desired reply type of the final response (default plain text).
        self.desire_rtype = "text"

    @action_logger(ActionType.ASK_OLLAMA)
    async def query_ollama(self) -> str | None:
        """
        Send the prepared prompt to the local Ollama model and return its answer.

        Returns None when the configured model is not installed locally or the
        Ollama server is unreachable; both failures are recorded through the
        DB handler so the caller can fall back to the online AI.
        :return: the local model's reply text, or None on failure
        """
        client = ollama.Client(host=aiConfig.OLLAMA_URL)
        try:
            # NOTE(review): placeholders here are bare "QUERY"/"NICKNAME",
            # while query_other() uses bracketed "[QUERY]"/"[NICKNAME]".
            # Confirm the two prompt templates really differ in format —
            # otherwise one of these replace() pairs is silently a no-op.
            self.prompt = self.prompt.replace("QUERY", self.query).replace("NICKNAME", self.chatDBHandler.nn)
            local_models = client.list()
            model_names = [model.model for model in local_models.models]
            if aiConfig.OLLAMA_MODEL not in model_names:
                # Model not pulled locally — record the event and bail out.
                logger.error(f"[AI] Ollama model: {aiConfig.OLLAMA_MODEL} not found")
                self.chatDBHandler.add_actions(ActionType.OLLAMA_UNFOUND_MODEL)
                return None
            logger.info("[AI] Ollama model found")
            response = client.generate(aiConfig.OLLAMA_MODEL, prompt=self.prompt)
            # response is an ollama GenerateResponse; .response holds the text.
            logger.info(f"[OLLAMA] Response from ollama: {response.response}")
            return response.response
        except ConnectionError as e:
            # Ollama server not running / unreachable.
            logger.error(f"[AI] Ollama connection failed. Change to online AI: {e}")
            self.chatDBHandler.add_actions(ActionType.OLLAMA_NOT_RUNNING)
            return None

    @action_logger(ActionType.ASK_OTHER_AI)
    async def query_other(self) -> str | None:
        """
        Query the configured OpenAI-compatible endpoint with the prepared prompt.

        Uses the openai client library against aiConfig.ONLINE_AI_URL.
        Any request failure is logged, recorded via the DB handler, and
        reported to the caller as None.
        :return: the stripped reply text, or None on failure
        """
        self.prompt = self.prompt.replace("[QUERY]", self.query).replace("[NICKNAME]", self.chatDBHandler.nn)
        try:
            client = openai.OpenAI(api_key=aiConfig.ONLINE_AI_KEY, base_url=aiConfig.ONLINE_AI_URL)
            response = client.chat.completions.create(
                model=aiConfig.ONLINE_MODEL,
                messages=[
                    {"role": "system", "content": aiConfig.CHARACTER_DESC},
                    {"role": "user", "content": self.prompt},
                ],
                max_tokens=1000,
                temperature=0.7
            )
            logger.info(f"[OPENAI] Response from OpenAI: {response.choices[0]}")
            return response.choices[0].message.content.strip()
        except Exception as e:
            logger.error(f"[AI] OpenAI request failed: {e}")
            self.chatDBHandler.add_actions(ActionType.ONLINE_AI_UNREACHABLE)
            return None

    async def handle_query(self) -> Reply:
        """
        Handle the request according to the subclass's instruction type.
        :return: a raw Reply object
        """
        raise NotImplementedError

    @action_logger(ActionType.PREPARE_RESPONSE)
    async def decorate_response(self, reply: Reply | None) -> Reply:
        """
        Finalize a Reply before sending: substitute an ERROR reply for None,
        verify FILE replies point at an existing file, and coerce the content
        to str.

        :param reply: the raw reply from handle_query(), possibly None
        :return: a Reply safe to send
        """
        # BUGFIX: the None check must run BEFORE any attribute access —
        # previously the logging line dereferenced reply.type/reply.content
        # first and raised AttributeError whenever reply was None.
        if reply is None:
            logger.error("[build response] Reply is None, response will be set as ERROR.")
            content = "额，额，我不知道发生了什么，混乱了...快摇我爹过来！"
            reply = Reply(ReplyType.ERROR, content)
        logger.info(f"[build response] Decorate response: {reply.type.name}, "
                    f"{reply.content[:50] + '...' if len(reply.content) >= 50 else reply.content}")
        self.chatDBHandler.answer = reply.content
        if reply.type == ReplyType.FILE:
            # Parse once; the first element is the file path to check.
            file_path = parse_file_reply(reply.content)[0]
            if not os.path.exists(file_path):
                logger.error(f"[build response] File not found: {file_path}")
                reply.content = "对不起, 文件好像不见了，要不你呼叫我爹来看看吧[委屈].."
        reply.content = str(reply.content)
        return reply
