"""
AI服务接口模块
基于cowain-ai.py的配置，提供与AI模型交互的统一接口
"""
import asyncio
from typing import List, Dict, Optional, Callable
from openai import OpenAI
import threading
import queue


class AIChatService:
    """AI chat service wrapping an OpenAI-compatible model endpoint.

    Maintains a running conversation history and offers a blocking API
    (``chat_sync``) and a background-thread API (``chat_async``) that
    delivers the reply through a registered callback.
    """

    def __init__(self):
        """Initialize the OpenAI-compatible client and conversation state."""
        # Same configuration as cowain-ai.py
        # (OPENAI_API_KEY / OPENAI_API_BASE equivalents).
        self.client = OpenAI(
            api_key="123",
            base_url="http://172.27.254.237:8080/v1"
        )
        self.model = "Qwen3-Next-80B-A3B-Instruct"

        # Full conversation history, oldest message first.
        self.chat_history: List[Dict[str, str]] = []

        # System prompt; re-inserted by clear_history().
        self.system_prompt = "你是一个乐于助人的 AI 助手。"

        # Optional callback invoked with the reply (or error text) by chat_async.
        self.response_callback: Optional[Callable[[str], None]] = None

        # Guards chat_history: chat_async mutates it from a worker thread
        # while the owner thread may read or clear it concurrently.
        self._lock = threading.Lock()

    def set_response_callback(self, callback: Callable[[str], None]) -> None:
        """Register a callback that receives AI replies from chat_async."""
        self.response_callback = callback

    def add_message(self, role: str, content: str) -> None:
        """Append a message to the conversation history."""
        with self._lock:
            self.chat_history.append({"role": role, "content": content})

    def clear_history(self) -> None:
        """Reset the history, keeping only the system prompt."""
        with self._lock:
            self.chat_history = [
                {"role": "system", "content": self.system_prompt}
            ]

    def get_history(self) -> List[Dict[str, str]]:
        """Return a shallow copy of the conversation history."""
        with self._lock:
            return self.chat_history.copy()

    def _request_reply(self, message: str) -> str:
        """Add *message* as a user turn, query the model, and record the reply.

        :param message: user message text
        :return: the assistant's reply text
        :raises Exception: re-raises whatever the underlying client raises.
            On failure the just-added user turn is rolled back so a failed
            call does not leave a dangling user message that would poison
            subsequent requests.
        """
        self.add_message("user", message)
        try:
            # Snapshot under the lock so a concurrent mutation cannot
            # change the list while the request is being serialized.
            with self._lock:
                messages = list(self.chat_history)

            response = self.client.chat.completions.create(
                model=self.model,
                messages=messages,
                temperature=0.7,
                max_tokens=2048
            )
            ai_response = response.choices[0].message.content
        except Exception:
            # Roll back the dangling user turn added above.
            with self._lock:
                if self.chat_history and self.chat_history[-1] == {
                    "role": "user", "content": message
                }:
                    self.chat_history.pop()
            raise

        self.add_message("assistant", ai_response)
        return ai_response

    def chat_sync(self, message: str) -> str:
        """Blocking chat call.

        :param message: user message
        :return: the AI reply, or an error description string on failure
        """
        try:
            return self._request_reply(message)
        except Exception as e:
            error_msg = f"聊天出错: {str(e)}"
            print(error_msg)
            return error_msg

    def chat_async(self, message: str) -> None:
        """Non-blocking chat call handled in a daemon thread.

        The reply (or error text) is delivered via the callback registered
        with set_response_callback, if any.

        :param message: user message
        """
        def chat_worker():
            try:
                result = self._request_reply(message)
            except Exception as e:
                result = f"聊天出错: {str(e)}"
                print(result)
            if self.response_callback:
                self.response_callback(result)

        thread = threading.Thread(target=chat_worker, daemon=True)
        thread.start()

    def test_connection(self) -> bool:
        """Probe the AI endpoint with a tiny request.

        :return: True when the model answers with the expected marker text.
        """
        try:
            response = self.client.chat.completions.create(
                model=self.model,
                messages=[
                    {"role": "system", "content": "你是一个测试助手。"},
                    {"role": "user", "content": "测试连接，请回复'连接成功'"}
                ],
                temperature=0.1,
                max_tokens=10
            )
            return "连接成功" in response.choices[0].message.content
        except Exception as e:
            print(f"连接测试失败: {e}")
            return False


# Module-level singleton AI service instance shared by the application.
ai_service = AIChatService()

# Seed the conversation with the system prompt so the very first request
# already carries it (mirrors what clear_history() re-inserts on reset).
ai_service.add_message("system", ai_service.system_prompt)