import yaml
import asyncio
from config.PathConfig import PROMPTS_CONFIG
from integrations.ollama_client import OllamaClient
from tools.log_handler import logger
import re

class ChatHandler:
    """Per-user chat session handler backed by an Ollama model.

    Loads a role-specific system prompt from the YAML prompts config and
    maintains an independent message history for each user.
    """

    def __init__(self, role="小李"):
        """Load the prompt template for *role* and set up the Ollama client.

        :param role: key into the ``prompts`` section of the YAML config
        :raises KeyError: if *role* (or its ``template``) is missing from the config
        :raises FileNotFoundError: if the prompts config file does not exist
        """
        # Load role prompt templates from the YAML config file.
        with open(PROMPTS_CONFIG, "r", encoding="utf-8") as f:
            prompts = yaml.safe_load(f)["prompts"]
        self.system_prompt = prompts[role]["template"]

        self.client = OllamaClient()
        # Per-user message history: {user: [message dicts in Ollama chat format]}
        # NOTE(review): histories grow without bound — consider truncating old
        # turns if long-running sessions become a memory concern.
        self.messages = {}
        # Fix: the original logged "initialization complete" twice; log once,
        # including the role, after everything is actually set up.
        logger.info(f"ChatHandler 初始化完成，角色: {role}")

    async def process(self, user, content):
        """Process one user message and return the model's reply.

        :param user: user identifier (WeChat friend name)
        :param content: user input text
        :return: reply text, or a canned apology string on any failure
        """
        # First contact with this user: seed the history with the system prompt.
        if user not in self.messages:
            self.messages[user] = [{"role": "system", "content": self.system_prompt}]
        history = self.messages[user]
        history.append({"role": "user", "content": content})

        try:
            logger.info(f"消息{history}")
            response = await self.client.async_chat(history)
            reply = response["message"]["content"]
            # Strip any <think>...</think> reasoning block the model may emit
            # (DOTALL so the block can span multiple lines).
            reply = re.sub(r"<think>.*?</think>", "", reply, flags=re.DOTALL).strip()
            history.append({"role": "assistant", "content": reply})
            return reply
        except Exception as e:
            # Broad catch is deliberate: a chat failure degrades to a canned
            # reply instead of crashing the message loop.
            # NOTE(review): the failed user message stays in history without an
            # assistant reply — confirm the backend tolerates consecutive user
            # turns, or pop it here.
            logger.error(f"处理 {user} 消息失败: {e}")
            return "我累了，明天再聊吧，晚安宝宝"