# encoding: utf-8
from langchain.chat_models import ChatOpenAI
# Base URL of the self-hosted LLM inference server (OpenAI-compatible API).
# NOTE(review): this is a tunnel/proxy endpoint — presumably environment-specific; consider
# moving it to configuration rather than hard-coding it here.
LLM_SERVER_ADDRESS = r'https://u359243-9d78-f301a417.bjb1.seetacloud.com:8443'


def create_langchain_llm_client(model_name=None, api_path=None, max_tokens=None,
                                streaming=False):
    """Build a ``ChatOpenAI`` client pointed at the self-hosted LLM server.

    :param model_name: model identifier to use; when ``None``, the first name
        reported by :func:`get_server_llm_model_names` is used.
    :param api_path: base URL of the OpenAI-compatible API; when ``None``,
        defaults to ``LLM_SERVER_ADDRESS + '/v1'``.
    :param max_tokens: maximum number of tokens to generate; ``None`` means
        no explicit limit (the server's default applies).
    :param streaming: whether the client should stream tokens as they are
        produced.
    :return: a configured :class:`ChatOpenAI` instance.
    """
    if model_name is None:
        model_name = get_server_llm_model_names()[0]

    if api_path is None:
        api_path = LLM_SERVER_ADDRESS + '/v1'

    llm_client = ChatOpenAI(
        streaming=streaming,
        model_name=model_name,
        openai_api_base=api_path,
        # The self-hosted server does not validate keys, but the field is required.
        openai_api_key="none",
        # Deterministic output: greedy decoding for reproducible responses.
        temperature=0.0,
        max_tokens=max_tokens,
    )
    return llm_client


def get_server_llm_model_names():
    """Return the list of LLM model names available on the server.

    TODO(review): the list is currently hard-coded to a single local model
    path; restore the controller ``/list_models`` query once the controller
    endpoint is available.

    :return: list of model name strings (never empty).
    """
    llm_model_names = ['/root/autodl-tmp/deepseek/DeepSeek-R1-Distill-Qwen-7B']
    return llm_model_names
