from langchain_openai import ChatOpenAI
from typing import (
    Optional,
    Callable,
    Any,
    Awaitable,
    List
)

import logging
import asyncio

# Load model configuration (ONLINE_LLM_MODEL) and the shared logger
from configs import ONLINE_LLM_MODEL, logger

# MinxChatOpenAI helper (NOTE(review): original comment said "Embedding model",
# but this import is a chat-model encoding helper — verify intent)
from server.minx_chat_openai import MinxChatOpenAI

def get_ChatOpenAI(
        model_name: str,
        temperature: float,
        max_tokens: Optional[int] = None,
        streaming: bool = True,
        callbacks: Optional[List[Callable]] = None,
        verbose: bool = True,
        **kwargs: Any,
) -> Optional[ChatOpenAI]:
    """
    Create a ChatOpenAI instance for the given online model.

    :param model_name: name of the model whose config is looked up via
        get_model_worker_config (backed by ONLINE_LLM_MODEL)
    :param temperature: sampling temperature
    :param max_tokens: maximum number of tokens to generate
        (None = provider default)
    :param streaming: whether to stream responses
    :param callbacks: LangChain callback handlers (defaults to none)
    :param verbose: whether to print verbose progress
    :param kwargs: extra keyword arguments forwarded to ChatOpenAI
    :return: a configured ChatOpenAI instance, or None when the model's
        config cannot be found
    """
    # Fix: the original used a mutable default (`callbacks=[]`), which is
    # shared across calls and would leak handlers between invocations.
    if callbacks is None:
        callbacks = []
    # Look up the per-model connection settings (api base/key, proxy, ...).
    config = get_model_worker_config(model_name)
    if config is None:
        # Report through the configured logger instead of bare print.
        logger.error(f"错误: 配置未正确加载。model_name={model_name}")
        return None
    # ChatOpenAI._get_encoding_model = MinxChatOpenAI.get_encoding_model
    model = ChatOpenAI(
        streaming=streaming,
        verbose=verbose,
        callbacks=callbacks,
        openai_api_base=config.get("api_base_url"),
        openai_api_key=config.get("api_key"),
        model_name=config.get("model_name"),
        temperature=temperature,
        max_tokens=max_tokens,
        openai_proxy=config.get("openai_proxy"),
        **kwargs
    )
    return model


def get_model_worker_config(model_name: str = None) -> dict:
    """
    Look up the worker configuration for an online LLM.

    :param model_name: key into the ONLINE_LLM_MODEL mapping
    :return: the model's config dict, or None for an unknown model
    """
    return ONLINE_LLM_MODEL.get(model_name)


def get_prompt_template(type: str, name: str) -> Optional[str]:
    '''
    Load a template body from prompt_config.

    type: one of "llm_chat", "agent_chat", "knowledge_base_chat",
        "search_engine_chat"; extend this set when new features are added.
    name: the template name within that group.
    Returns the template string, or None if *name* is unknown.
    '''
    import importlib

    from configs import prompt_config

    # Reload so edits to prompt_config take effect without a restart.
    importlib.reload(prompt_config)
    group = prompt_config.PROMPT_TEMPLATES[type]
    return group.get(name)



async def wrap_done(fn: Awaitable, event: asyncio.Event):
    """Await *fn*, then set *event* so consumers know it finished.

    Any exception raised by the awaitable is logged and swallowed (not
    re-raised); the event is set regardless, so iterators waiting on it
    always stop.
    """
    try:
        await fn
    except Exception as e:
        # Fix: the original logged the same exception twice (root
        # `logging.exception` plus `logger.error`), and its hard-coded
        # log_verbose=False meant exc_info=None, dropping the traceback
        # from the project logger. Log once, with the traceback.
        msg = f"Caught exception: {e}"
        logger.error(f'{e.__class__.__name__}: {msg}', exc_info=e)
    finally:
        # Signal the aiter to stop.
        event.set()


if __name__ == '__main__':

    # Smoke test: load the config of an online model.
    ans = get_model_worker_config("zhipu-api")
    print(ans)
    # Fix: when "zhipu-api" is absent from ONLINE_LLM_MODEL, ans is None
    # and the original unconditional .get() raised AttributeError.
    if ans is not None:
        print(ans.get("api_key"))
