import os

from dotenv import load_dotenv
from langchain_openai import ChatOpenAI

# Load environment variables from a .env file into the process environment.
load_dotenv()

# LLM connection settings; any of these may be None if not set in the environment.
model_name = os.getenv("LLM_CHAT_MODEL")
api_key = os.getenv("LLM_API_KEY")
base_url = os.getenv("LLM_BASE_URL")
# SECURITY: never print the raw API key — only a masked prefix for debugging.
print(model_name, (api_key[:4] + "****") if api_key else None, base_url)
class LLMInit:
    """Thin factory around a configured ChatOpenAI client.

    Connection settings default to the module-level values loaded from the
    environment, but each can be overridden per instance.
    """

    def __init__(self, temperature=0.75, model=None, key=None, url=None):
        """Create the underlying ChatOpenAI client.

        Args:
            temperature: Sampling temperature passed to the model.
            model: Model name; defaults to the LLM_CHAT_MODEL env value.
            key: API key; defaults to the LLM_API_KEY env value.
            url: Base URL; defaults to the LLM_BASE_URL env value.
        """
        self.llm = ChatOpenAI(
            temperature=temperature,
            model_name=model if model is not None else model_name,
            api_key=key if key is not None else api_key,
            base_url=url if url is not None else base_url,
        )

    def get_llm(self):
        """Return the configured ChatOpenAI instance."""
        return self.llm
