import os
import httpx
import yaml

class LLM():
    """Facade that routes chat requests to a vendor-specific backend.

    The backend (GPT / GLM / DeepSeek / Baidu) is picked from the model
    name; passing a different ``model`` to :meth:`chat` swaps backends
    on the fly.
    """

    def __init__(self, **kwargs):
        # Default backend is GLM's flash model; debug defaults to True here
        # (unlike set_LLM's own default) to match the original behavior.
        self.set_LLM(kwargs.get("model", "glm-4-flash"), kwargs.get("debug", True))

    def set_LLM(self, model_name, debug=False):
        """Select the backend for ``model_name`` (``None`` defers selection).

        Raises:
            ValueError: if the name matches no known vendor. In that case
                the previously selected backend (if any) is left untouched.
        """
        if model_name is None:
            self.model = None
            return

        lowered = model_name.lower()
        if "gpt" in lowered:
            chat_bot = GPT(model=model_name, debug=debug)
        elif "glm" in lowered:
            chat_bot = GLM(model=model_name, debug=debug)
        elif "deepseek" in lowered:
            chat_bot = DeepSeek(model=model_name, debug=debug)
        elif "ernie" in lowered:
            chat_bot = Baidu(model=model_name, debug=debug)
        else:
            # Previously this only printed and returned, leaving self.model
            # updated but self.chat_bot stale (or unset) — later chats then
            # used the wrong backend silently or crashed with AttributeError.
            print(f"[error] 不支持的模型: {model_name}.")
            raise ValueError(f"[error] 不支持的模型: {model_name}.")
        # Commit state only after the backend was created successfully.
        self.model = model_name
        self.chat_bot = chat_bot

    def chat(self, prompt="Aloha, what version are you?", **kwargs):
        """Send ``prompt`` (or an explicit ``messages`` kwarg) to the backend."""
        model_name = kwargs.get("model", self.model)
        if kwargs.get("debug", False):
            print(">> model_name: ", model_name)
        if self.model != model_name:
            # Forward the debug flag (it was previously dropped here).
            self.set_LLM(model_name, debug=kwargs.get("debug", False))

        return self.chat_bot.chat(prompt=prompt, **kwargs)



class LLM_base():
    """Shared base for all backends: loads ``config.yaml`` that sits next
    to this source file and keeps the constructor kwargs for later use as
    per-call defaults."""

    def __init__(self, **kwargs):
        self.CURRENT_FOLDER = os.path.dirname(os.path.abspath(__file__))
        config_path = os.path.join(self.CURRENT_FOLDER, 'config.yaml')
        with open(config_path, 'r', encoding='utf-8') as fh:
            self.config = yaml.load(fh, Loader=yaml.FullLoader)
        self.kwargs = kwargs

class GLM(LLM_base):
    """Zhipu AI (GLM) backend."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        from zhipuai import ZhipuAI
        # API key: the environment variable wins over config.yaml.
        self.client = ZhipuAI(
            api_key=os.environ.get("ZHIPUAI_API_KEY", self.config['zhipuai_api_key']),
            max_retries=kwargs.get("max_retries", 5),
            timeout=kwargs.get("timeout", httpx.Timeout(60.0, read=20.0, write=20.0, connect=20.0)),
        )

    def chat(self, prompt="Aloha, what version are you?", **kwargs):
        """Return the model's reply text, or the error message on failure."""
        # Constructor kwargs act as per-call defaults; call kwargs win.
        merged = dict(self.kwargs)
        merged.update(kwargs)
        try:
            tools = None
            if merged.get("web_search"):
                # Enable Zhipu's built-in web-search tool.
                tools = [{"type": "web_search", "web_search": {"enable": True}}]
            chosen_model = merged.get("model", "glm-4-flash")
            if merged.get("debug", False):
                print(">> model_name: ", chosen_model)
            request_messages = merged.get(
                "messages",
                [{"role": "user", "content": prompt}],
            )
            response = self.client.chat.completions.create(
                messages=request_messages,
                tools=tools,
                model=chosen_model,
                # Sampling parameters go through extra_body for this SDK.
                extra_body={
                    "temperature": merged.get("temperature", 0.8),
                    "top_p": merged.get("top_p", 1),
                },
            )
            return response.choices[0].message.content
        except Exception as exc:
            # Best-effort API: surface the error text instead of raising.
            return str(exc)

class GPT(LLM_base):
    """OpenAI (GPT) backend."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        import openai
        # Base URL and API key: environment variables win over config.yaml.
        self.client = openai.OpenAI(
            base_url=os.environ.get("OPENAI_BASE_URL", self.config.get('openai_base_url', None)),
            api_key=os.environ.get("OPENAI_API_KEY", self.config['openai_api_key']),
            max_retries=kwargs.get("max_retries", 5),
            timeout=kwargs.get("timeout", httpx.Timeout(60.0, read=20.0, write=20.0, connect=20.0)),
        )

    def chat(self, prompt="Aloha, what version are you?", **kwargs):
        """Return the model's reply text, or the error message on failure."""
        # Constructor kwargs act as per-call defaults; call kwargs win.
        merged = {**self.kwargs, **kwargs}
        try:
            chosen_model = merged.get("model", "gpt-4o-mini")
            if merged.get("debug", False):
                print(">> model_name: ", chosen_model)
            request_messages = merged.get(
                "messages",
                [{"role": "user", "content": prompt}],
            )
            response = self.client.chat.completions.create(
                messages=request_messages,
                model=chosen_model,
                temperature=merged.get("temperature", 0.8),
                top_p=merged.get("top_p", 1),
            )
            return response.choices[0].message.content
        except Exception as exc:
            # Best-effort API: surface the error text instead of raising.
            return str(exc)
    
class DeepSeek(LLM_base):
    """DeepSeek backend (OpenAI-compatible API at api.deepseek.com)."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        from openai import OpenAI
        # Bug fix: the key was read from OPENAI_API_KEY, which would send an
        # OpenAI credential to DeepSeek. Prefer the dedicated DEEPSEEK_API_KEY,
        # keeping OPENAI_API_KEY as a fallback for backward compatibility.
        api_key = (os.environ.get("DEEPSEEK_API_KEY")
                   or os.environ.get("OPENAI_API_KEY", self.config['deepseek_api_key']))
        self.client = OpenAI(
            base_url="https://api.deepseek.com",
            api_key=api_key,
            max_retries=kwargs.get("max_retries", 5),
            timeout=kwargs.get("timeout", httpx.Timeout(60.0, read=20.0, write=20.0, connect=20.0)),
        )

    def chat(self, prompt="Aloha, what version are you?", **kwargs):
        """Return the model's reply text, or the error message on failure.

        Recognized kwargs: model (default "deepseek-chat"), messages,
        temperature, top_p, debug. Constructor kwargs act as defaults.
        """
        # Constructor kwargs act as per-call defaults; call kwargs win.
        for key in self.kwargs:
            if key not in kwargs:
                kwargs[key] = self.kwargs[key]
        try:
            model_name = kwargs.get("model", "deepseek-chat")
            if kwargs.get("debug", False):
                print(">> model_name: ", model_name)
            response = self.client.chat.completions.create(
                messages=kwargs.get("messages", [
                    {
                        "role": "user",
                        "content": prompt,
                    }
                ]),
                model=model_name,
                temperature=kwargs.get("temperature", 0.8),
                top_p=kwargs.get("top_p", 1),
            )
            return response.choices[0].message.content
        except Exception as e:
            # Best-effort API: surface the error text instead of raising.
            return str(e)
        
class Baidu(LLM_base):
    """Baidu Qianfan (ERNIE) backend."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        import qianfan
        qianfan.disable_log()
        # Prefer the newer IAM-style credentials (ACCESS_KEY/SECRET_KEY);
        # fall back to the legacy AK/SK pair when the access key is absent.
        ACCESS_KEY = os.environ.get("QIANFAN_ACCESS_KEY", self.config.get('baidu_access_key', None))
        if ACCESS_KEY is not None:  # 百度新版调用方式，优先使用，兼容性较强
            SECRET_KEY = os.environ.get("QIANFAN_SECRET_KEY", self.config.get('baidu_secret_key', None))
            if SECRET_KEY is None:
                # Bug fix: os.environ rejects None with an opaque TypeError;
                # fail with an actionable message instead.
                raise ValueError("QIANFAN_SECRET_KEY / baidu_secret_key is missing "
                                 "while an access key is configured.")
            os.environ["QIANFAN_ACCESS_KEY"] = ACCESS_KEY
            os.environ["QIANFAN_SECRET_KEY"] = SECRET_KEY
        else:
            API_KEY = os.environ.get("QIANFAN_AK", self.config.get('baidu_api_key_v1', None))
            SECRET_KEY = os.environ.get("QIANFAN_SK", self.config.get('baidu_secret_key_v1', None))
            if API_KEY is None or SECRET_KEY is None:
                raise ValueError("No Qianfan credentials configured: set "
                                 "QIANFAN_ACCESS_KEY/QIANFAN_SECRET_KEY or QIANFAN_AK/QIANFAN_SK.")
            os.environ["QIANFAN_AK"] = API_KEY
            os.environ["QIANFAN_SK"] = SECRET_KEY
        self.client = qianfan.ChatCompletion()

    def chat(self, prompt="Aloha, what version are you?", **kwargs):
        """Return the model's reply text, or the error message on failure.

        Recognized kwargs: model (default "ERNIE-Speed-128K"), messages,
        temperature, top_p, debug. Constructor kwargs act as defaults.
        """
        # Constructor kwargs act as per-call defaults; call kwargs win.
        for key in self.kwargs:
            if key not in kwargs:
                kwargs[key] = self.kwargs[key]
        try:
            model_name = kwargs.get("model", "ERNIE-Speed-128K")
            if kwargs.get("debug", False):
                print(">> model_name: ", model_name)
            response = self.client.do(
                messages=kwargs.get("messages", [
                    {
                        "role": "user",
                        "content": prompt,
                    }
                ]),
                model=model_name,
                temperature=kwargs.get("temperature", 0.8),
                top_p=kwargs.get("top_p", 1),
            )
            # Qianfan responses carry the reply under body["result"].
            return response.body["result"]
        except Exception as e:
            # Best-effort API: surface the error text instead of raising.
            return str(e)

if __name__ == "__main__":
    # Smoke test: ask the same question across several backends.
    chat_bot = LLM(model=None, debug=True)
    demo_models = ["glm-4-flash", "glm-4-plus", "gpt-4o-mini", "gpt-4o"]
    # Other candidates: "Doubao-pro-128k", "ERNIE-Speed-128K", "deepseek-chat"
    for demo_model in demo_models:
        reply = chat_bot.chat(
            model=demo_model,
            prompt="什么是损失函数？",
            web_search=True,
        )
        print(reply)

    