import logging
from typing import Optional, Dict, Callable
import g4f
import requests
from openai import AzureOpenAI, OpenAI
import config


class LLMHandler:
    """Base class for LLM request handlers.

    Subclasses receive provider-specific settings as keyword arguments
    (api_key, model_name, base_url, ...) and implement ``execute`` to
    produce a text completion.
    """

    def __init__(self, **kwargs):
        # Raw provider parameters; values may be None when the caller
        # omitted them — subclasses check in validate().
        self.params = kwargs

    def validate(self):
        """Validate parameters (subclasses may override).

        Raises:
            ValueError: when a required parameter is missing (subclass-defined).
        """
        pass

    def execute(self, messages: list) -> str:
        """Run the generation task (subclasses must implement).

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The generated reply text.
        """
        raise NotImplementedError


class G4fHandler(LLMHandler):
    """Handle requests to free models via the g4f library."""

    def execute(self, messages: list) -> str:
        """Generate a reply with g4f and strip newlines from it.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The completion text with all newline characters removed.
        """
        # Fall back to a default model when model_name is absent or falsy.
        model = self.params.get("model_name") or "gpt-3.5-turbo-16k-0613"
        response = g4f.ChatCompletion.create(
            model=model,
            messages=messages,
        )
        return response.replace("\n", "")


class QwenHandler(LLMHandler):
    """Handle requests to Alibaba Cloud Tongyi Qianwen (DashScope)."""

    def validate(self):
        """Raise ValueError when api_key or model_name is missing.

        execute() reads model_name unconditionally, so we fail fast here
        instead of failing inside the dashscope call with model=None.
        """
        if not self.params.get("api_key"):
            raise ValueError("通义千问需要api_key参数")
        if not self.params.get("model_name"):
            raise ValueError("通义千问需要model_name参数")

    def execute(self, messages: list) -> str:
        """Call the DashScope Generation API and return the reply text.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The output text with all newline characters removed.

        Raises:
            Exception: when the API responds with a non-200 status code.
        """
        # Imported lazily so the dashscope package is only required when
        # the qwen provider is actually used.
        import dashscope
        dashscope.api_key = self.params["api_key"]

        response = dashscope.Generation.call(
            model=self.params["model_name"],
            messages=messages
        )

        if response.status_code != 200:
            raise Exception(f"通义千问API错误: {response}")
        return response["output"]["text"].replace("\n", "")


class AzureHandler(LLMHandler):
    """Handle Azure OpenAI requests."""
    # All four are mandatory; Azure has no usable defaults.
    REQUIRED_PARAMS = ["api_key", "model_name", "base_url", "api_version"]

    def validate(self):
        # Collect every missing parameter so the error lists them all at once.
        missing = [p for p in self.REQUIRED_PARAMS if not self.params.get(p)]
        if missing:
            raise ValueError(f"Azure缺失必要参数: {', '.join(missing)}")

    def execute(self, messages: list) -> str:
        """Call the Azure OpenAI chat-completions endpoint.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The first choice's message content.
        """
        client = AzureOpenAI(
            api_key=self.params["api_key"],
            api_version=self.params["api_version"],
            azure_endpoint=self.params["base_url"],
        )
        response = client.chat.completions.create(
            model=self.params["model_name"],
            messages=messages
        )
        return response.choices[0].message.content


class OpenAICompatibleHandler(LLMHandler):
    """Handle OpenAI-compatible API requests (openai/moonshot/ollama/deepseek/oneapi)."""
    # Per-provider fallbacks applied when the caller omits a parameter.
    PROVIDER_DEFAULTS = {
        "moonshot": {
            "base_url": "https://api.moonshot.cn/v1",
            "model_name": "moonshot-v1-128k"
        },
        "ollama": {
            "api_key": "1",
            "base_url": "http://localhost:11434/v1",
            "model_name": "llama3.1:latest"
        },
        "deepseek": {
            "base_url": "https://api.deepseek.com/v1"
        },
        "openai": {
            "base_url": "https://api.openai.com/v1",
            "model_name": "gpt-3.5-turbo"
        },
        "oneapi": {}
    }

    def _effective_params(self) -> dict:
        """Return api_key/base_url/model_name with provider defaults applied."""
        defaults = self.PROVIDER_DEFAULTS.get(self.params["provider"], {})
        return {
            key: self.params.get(key) or defaults.get(key)
            for key in ("api_key", "base_url", "model_name")
        }

    def validate(self):
        """Raise ValueError when a required parameter is missing.

        Validation runs against the *effective* parameters (defaults
        merged in), so providers with defaults — e.g. ollama's model or
        openai's gpt-3.5-turbo — pass without explicit values. Previously
        this checked the raw params and rejected calls that execute()
        would have satisfied from PROVIDER_DEFAULTS.
        """
        provider = self.params["provider"]
        effective = self._effective_params()
        if not effective["api_key"] and provider != "ollama":
            raise ValueError(f"{provider} 需要api_key参数")
        if not effective["model_name"]:
            raise ValueError(f"{provider} 需要model_name参数")

    def execute(self, messages: list) -> str:
        """Send the chat request through the OpenAI client.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The first choice's message content.
        """
        effective = self._effective_params()

        client = OpenAI(api_key=effective["api_key"], base_url=effective["base_url"])
        response = client.chat.completions.create(
            model=effective["model_name"],
            messages=messages
        )
        return response.choices[0].message.content


class CloudflareHandler(LLMHandler):
    """Handle Cloudflare Workers AI requests."""
    REQUIRED_PARAMS = ["api_key", "account_id", "model_name"]

    def validate(self):
        # Collect every missing parameter so the error lists them all at once.
        missing = [p for p in self.REQUIRED_PARAMS if not self.params.get(p)]
        if missing:
            raise ValueError(f"Cloudflare缺失必要参数: {', '.join(missing)}")

    def execute(self, messages: list) -> str:
        """POST the messages to the Workers AI endpoint and return the reply.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The generated response text.

        Raises:
            requests.HTTPError: on a non-2xx HTTP response.
            Exception: when the API reports a failed request.
        """
        url = (
            "https://api.cloudflare.com/client/v4/accounts/"
            f"{self.params['account_id']}/ai/run/{self.params['model_name']}"
        )
        response = requests.post(
            url,
            headers={"Authorization": f"Bearer {self.params['api_key']}"},
            json={"messages": messages}
        )
        # Fail loudly on HTTP/API errors instead of surfacing an opaque
        # KeyError on the ["result"] lookup below.
        response.raise_for_status()
        payload = response.json()
        if not payload.get("success", True):
            raise Exception(f"Cloudflare API错误: {payload.get('errors')}")
        return payload["result"]["response"]


class ErnieHandler(LLMHandler):
    """Handle Baidu ERNIE (Wenxin Yiyan) requests."""
    REQUIRED_PARAMS = ["api_key", "secret_key", "base_url"]

    def validate(self):
        # Collect every missing parameter so the error lists them all at once.
        missing = [p for p in self.REQUIRED_PARAMS if not self.params.get(p)]
        if missing:
            raise ValueError(f"文心一言缺失必要参数: {', '.join(missing)}")

    def execute(self, messages: list) -> str:
        """Exchange credentials for an access token, then call the chat endpoint.

        Args:
            messages: OpenAI-style list of ``{"role", "content"}`` dicts.

        Returns:
            The "result" field of the response, or "" when absent.

        Raises:
            requests.HTTPError: on a non-2xx response from either call.
            Exception: when no access token is returned (e.g. bad credentials).
        """
        # Step 1: obtain an OAuth access token.
        token_response = requests.post(
            "https://aip.baidubce.com/oauth/2.0/token",
            params={
                "grant_type": "client_credentials",
                "client_id": self.params["api_key"],
                "client_secret": self.params["secret_key"]
            }
        )
        token_response.raise_for_status()
        token_payload = token_response.json()
        token = token_payload.get("access_token")
        if not token:
            # On bad credentials Baidu returns an error payload with no
            # access_token; raise instead of a KeyError.
            raise Exception(f"文心一言获取access_token失败: {token_payload}")

        # Step 2: call the chat endpoint with the token.
        response = requests.post(
            f"{self.params['base_url']}?access_token={token}",
            json={"messages": messages},
            headers={"Content-Type": "application/json"}
        )
        response.raise_for_status()
        return response.json().get("result", "")


# Provider name -> handler class. All OpenAI-compatible providers share
# OpenAICompatibleHandler, which applies per-provider defaults itself.
HANDLERS: Dict[str, type] = {
    "g4f": G4fHandler,
    "qwen": QwenHandler,
    "azure": AzureHandler,
    "cloudflare": CloudflareHandler,
    "ernie": ErnieHandler,
    "openai": OpenAICompatibleHandler,
    "moonshot": OpenAICompatibleHandler,
    "ollama": OpenAICompatibleHandler,
    "deepseek": OpenAICompatibleHandler,
    "oneapi": OpenAICompatibleHandler
}


def _generate_response(
        messages: list,
        provider: str = config.llm_model,
        api_key: Optional[str] = config.llm_key,
        model_name: Optional[str] = config.model_name,
        base_url: Optional[str] = None,
        api_version: Optional[str] = None,
        secret_key: Optional[str] = None,
        account_id: Optional[str] = None,
        tools: Optional[list] = None,
) -> str:
    """Generate a text reply using the specified LLM provider.

    Supported providers and their required parameters:
    - 'g4f'       : free models, no credentials required
    - 'openai'    : api_key and model_name, default model gpt-3.5-turbo
    - 'azure'     : api_key, model_name, base_url and api_version
    - 'moonshot'  : api_key and model_name, default base https://api.moonshot.cn/v1
    - 'ollama'    : model_name, default local base http://localhost:11434/v1
    - 'qwen'      : api_key and model_name, requires the dashscope package
    - 'cloudflare': api_key, account_id and model_name
    - 'ernie'     : api_key, secret_key and base_url
    - 'deepseek'  : api_key and model_name
    - 'oneapi'    : api_key, model_name and base_url

    Args:
        messages: OpenAI-style list of ``{"role", "content"}`` dicts.
        tools: optional OpenAI-style tool specs; when given (and the
            provider supports it) a system prompt describing them is
            prepended to the conversation.

    Returns:
        The generated reply, or an "错误: ..." string on any failure
        (errors are logged, never raised to the caller).
    """
    try:
        # Resolve the handler class for this provider.
        handler_class = HANDLERS.get(provider)
        if not handler_class:
            raise ValueError(f"不支持的供应商类型: {provider}")

        handler = handler_class(
            provider=provider,
            api_key=api_key,
            model_name=model_name,
            base_url=base_url,
            api_version=api_version,
            secret_key=secret_key,
            account_id=account_id
        )

        # Fail fast on missing parameters before any network call.
        handler.validate()

        # Prepend the tool-usage system prompt for providers that support it.
        if tools and provider in ["openai", "azure", "moonshot", "deepseek"]:
            tool_prompt = TOOL_PROMPT_TEMPLATE.format(
                TOOL_DESCRIPTIONS=generate_tool_descriptions(tools)
            )
            # Build a new list instead of insert(0, ...) so the caller's
            # messages list is never mutated.
            messages = [{"role": "system", "content": tool_prompt}] + messages

        return handler.execute(messages)

    except Exception as e:
        logging.error(f"{provider} 模型调用异常: {str(e)}")
        return f"错误: {str(e)}"


# System-prompt template injected when tools are supplied.
# {TOOL_DESCRIPTIONS} is filled via str.format() with the output of
# generate_tool_descriptions(); the doubled braces keep the TOOL_CALL
# JSON example literal through formatting.
TOOL_PROMPT_TEMPLATE = """
你是一个智能助手，可以访问各种工具来帮助用户解决问题。
你可以使用的工具包括：

{TOOL_DESCRIPTIONS}

当用户请求需要工具辅助时，请使用以下格式回复：
TOOL_CALL: {{"name": "工具名称", "arguments": {{"参数1": "值1", "参数2": "值2"}}}}

如果你不需要使用工具，请直接回复答案。
"""


def generate_tool_descriptions(tools: list) -> str:
    """Render a human-readable description block for each tool.

    Each block lists the tool's name and description and, when a
    parameter schema is present, one line per parameter (including its
    allowed enum values). Blocks are separated by a blank line.
    """
    blocks = []

    for entry in tools:
        spec = entry["function"]
        text = f"工具名称: {spec['name']}\n描述: {spec['description']}\n"

        # Append one bullet per declared parameter, if any.
        if "parameters" in spec:
            text += "参数:\n"
            for pname, schema in spec["parameters"]["properties"].items():
                line = f"  - {pname}: {schema.get('description', '')}"
                if "enum" in schema:
                    line += f" (可选值: {', '.join(schema['enum'])})"
                text += line + "\n"

        blocks.append(text)

    return "\n\n".join(blocks)


if __name__ == "__main__":
    # Manual smoke test. Provider notes:
    # - 'g4f'       : free models, no credentials required
    # - 'openai'    : api_key and model_name, default model gpt-3.5-turbo
    # - 'azure'     : api_key, model_name, base_url and api_version
    # - 'moonshot'  : api_key and model_name, default base https://api.moonshot.cn/v1
    # - 'ollama'    : model_name, default local base http://localhost:11434/v1
    # - 'qwen'      : api_key and model_name, requires the dashscope package
    # - 'gemini'    : api_key and model_name, requires google-generativeai
    # - 'cloudflare': api_key, account_id and model_name
    # - 'ernie'     : api_key, secret_key and base_url
    # - 'deepseek'  : api_key and model_name; models: deepseek-chat, deepseek-reasoner
    # - 'oneapi'    : api_key, model_name and base_url
    prompt = """
    扩写文案：
    我当然知道那不是我的月亮
    但有一刻
    月亮的确照在了我身上
    可生活不是电影
    我也缺少点运气
    我悄然触摸你
    却未曾料想
    你像蒲公英散开了
    到处啊
    都是你的模样
    """
    print("=========================================================")
    print(_generate_response([{"role": "user", "content": prompt}]))
