import os
from configuration.config import model,api_key,temperature,base_url
from langchain_community.chat_models import ChatOpenAI


def returnllm():
    """Build and return a ChatOpenAI client configured from ``configuration.config``.

    Connection settings (``model``, ``temperature``, ``api_key``, ``base_url``)
    are taken from the project-level config module imported at the top of this
    file; each completion is capped at 2048 tokens via ``max_tokens``.

    Returns:
        ChatOpenAI: a ready-to-use chat model client.
    """
    llm = ChatOpenAI(
        model=model,
        temperature=temperature,
        api_key=api_key,
        base_url=base_url,
        max_tokens=2048,
    )
    # Smoke test: one round-trip to confirm the endpoint is reachable and
    # the credentials work.
    # NOTE(review): this makes a real (billable) API call on every
    # construction — consider removing or gating it for production use.
    res = llm.invoke("你是谁？")
    print(res.content)
    return llm
if __name__ == "__main__":
    # Guard the script entry point: previously this ran unconditionally,
    # making a live LLM API call as a side effect of merely importing the
    # module. Now the smoke test only runs when executed as a script.
    returnllm()