from typing import List
import ollama
from fastapi import requests
from langchain_core.embeddings import Embeddings
from langchain_ollama import ChatOllama
from langchain_core.language_models.chat_models import BaseChatModel
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage
from langchain_core.outputs import ChatResult, ChatGeneration
import requests
import json
from langchain_openai import ChatOpenAI
from core.config import settings

# DeepSeek LLM (OpenAI-compatible endpoint)
def get_default_llm():
    """Build the default (DeepSeek) chat model from application settings."""
    return ChatOpenAI(
        model=settings.LLM_MODEL_NAME,
        api_key=settings.LLM_API_KEY,
        base_url=settings.LLM_BASE_URL,
        temperature=settings.LLM_TEMPERATURE,
        streaming=settings.LLM_STREAMING,
    )
# Tongyi Qianwen (Alibaba) LLM
def get_alibaba_llm():
    """Build the Alibaba (Tongyi Qianwen) chat model from application settings."""
    return ChatOpenAI(
        model=settings.LLM_MODEL_NAME_ALIBABA,
        api_key=settings.LLM_API_KEY_ALIBABA,
        base_url=settings.LLM_BASE_URL_ALIBABA,
        temperature=settings.LLM_TEMPERATURE,
        streaming=settings.LLM_STREAMING,
    )
    
# Zhipu (GLM) LLM
def get_zhipu_llm():
    """Build the Zhipu chat model from application settings.

    NOTE(review): unlike the other factories, no ``base_url`` is supplied
    here, so ChatOpenAI falls back to its default OpenAI endpoint — confirm
    the Zhipu base URL is injected elsewhere (e.g. the OPENAI_BASE_URL env
    var) or this call will target the wrong service.
    """
    return ChatOpenAI(
        model=settings.Zhipu_MODEL_NAME,
        api_key=settings.Zhipu_API_KEY,
        temperature=settings.LLM_TEMPERATURE,
        streaming=settings.LLM_STREAMING,
        max_tokens=4096,
        request_timeout=60,
    )

# Locally deployed model (Ollama)
def get_ollama_llm():
    """Build a chat model backed by a local Ollama deployment."""
    return ChatOllama(
        model=settings.LLM_MODEL_NAME_LOCAL,
        base_url=settings.LLM_BASE_URL_LOCAL,
        streaming=settings.LLM_STREAMING,
        temperature=settings.LLM_TEMPERATURE,
        timeout=60,  # seconds; avoid hanging on an unresponsive local server
    )




# Custom Coze ChatLLM class, inheriting from LangChain's BaseChatModel
class CozeChatLLM(BaseChatModel):
    """
    Custom ChatLLM implementation for the Coze API.

    Converts LangChain messages into Coze's ``additional_messages`` format,
    calls the bot's chat endpoint, and wraps the reply in a ``ChatResult``.
    """

    api_key: str                    # Coze access token (sent as Bearer auth)
    base_url: str                   # full URL of the Coze chat endpoint
    bot_id: str                     # target bot/agent id
    user_id: str = "user_default"   # caller identity forwarded to Coze
    temperature: float = 0.7        # kept on the model; not sent in the request payload
    auto_save_history: bool = True  # let Coze persist the conversation server-side

    def _generate(self, messages, stop=None, **kwargs):
        """
        Generate a chat response by calling the Coze API.

        Args:
            messages: LangChain messages forming the conversation so far.
                Only Human/AI messages are forwarded; other types are skipped.
            stop: ignored — no stop-sequence support in this endpoint call.

        Returns:
            ChatResult containing a single AIMessage generation.

        Raises:
            Exception: on transport errors, invalid JSON, or a non-zero
                Coze error code.
        """
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json"
        }

        # Convert LangChain message objects into Coze's message dicts.
        additional_messages = []
        for message in messages:
            if isinstance(message, HumanMessage):
                role = "user"
            elif isinstance(message, AIMessage):
                role = "assistant"
            else:
                # System/tool messages have no mapping here; skip them
                # (matches the original behavior of ignoring other types).
                continue
            additional_messages.append({
                "role": role,
                "content": message.content,
                "content_type": "text"
            })

        payload = {
            "bot_id": self.bot_id,
            "user_id": self.user_id,
            "stream": False,
            "auto_save_history": self.auto_save_history,
            "additional_messages": additional_messages
        }

        try:
            # BUG FIX: passing a non-ASCII str via `data=` makes requests
            # encode it as latin-1, corrupting Chinese content. Encode the
            # JSON body as UTF-8 explicitly, and add a timeout so a hung
            # endpoint cannot block the caller forever.
            response = requests.post(
                self.base_url,
                headers=headers,
                data=json.dumps(payload, ensure_ascii=False).encode("utf-8"),
                timeout=60,
            )

            # Raise on HTTP-level errors (4xx/5xx).
            response.raise_for_status()

            response_data = response.json()

            # Extract the reply content from a successful response.
            if response_data.get("code") == 0 and "data" in response_data:
                # BUG FIX: the old `.get("messages", [{}])[0]` raised
                # IndexError when Coze returned an empty list (the default
                # only covers a *missing* key). Fall back to [{}] for both.
                coze_messages = response_data["data"].get("messages") or [{}]
                reply_content = coze_messages[0].get("content", "")
            else:
                # Surface the API-level error message.
                error_msg = response_data.get("msg", "未知错误")
                raise Exception(f"扣子API错误: {error_msg}")

            # Wrap the reply in LangChain result objects.
            ai_message = AIMessage(content=reply_content)
            generation = ChatGeneration(message=ai_message)
            return ChatResult(generations=[generation])

        except requests.RequestException as e:
            raise Exception(f"请求扣子API失败: {str(e)}")
        except json.JSONDecodeError:
            raise Exception("扣子API返回无效的JSON响应")

    @property
    def _llm_type(self):
        """Identifier LangChain uses for this model type."""
        return "coze-chat"

    def _llm_default_params(self):
        """No default parameters are added to requests."""
        return {}

# Coze model — built on the custom CozeChatLLM class
def get_coze_llm():
    """
    Build a Coze model instance.

    Uses the custom CozeChatLLM class, which handles the special request
    format required by the Coze API.
    """
    return CozeChatLLM(
        bot_id=settings.COZE_LLM_AGENT_ID,
        api_key=settings.COZE_LLM_API_KEY,
        base_url=settings.COZE_LLM_API_URL,
        temperature=settings.LLM_TEMPERATURE,
        auto_save_history=True,
        user_id="user_default",  # adjust per caller if needed
    )
