import os

from langchain.chains import ConversationChain
from langchain.memory import ConversationBufferMemory
from langchain_community.chat_models import ChatOpenAI
from langchain_openai import ChatOpenAI


class Tongyi_llm:
    """Lazy wrapper around a DashScope (Qwen) chat model served through
    the OpenAI-compatible endpoint.

    The model, its conversation memory, and the ConversationChain are
    created once on first use and then reused; the attributes are
    declared at class level, so state is shared across instances that
    never call ``create_llm`` themselves.
    """

    # Shared lazy-initialized state (None until create_llm() runs).
    _llm_instance = None
    _memory = None
    _chain = None

    def create_llm(self, key):
        """Create (or reuse) the LLM, memory, and conversation chain.

        Args:
            key: DashScope API key for the OpenAI-compatible endpoint.
                If falsy, an already-set DASHSCOPE_API_KEY environment
                variable is used instead.

        Returns:
            The shared ChatOpenAI instance.
        """
        # Bug fix: the original ignored `key` and hard-coded a secret
        # API key here. Use the caller-supplied key; fall back to any
        # pre-existing environment variable when none is given.
        if key:
            os.environ["DASHSCOPE_API_KEY"] = key
        if not self._llm_instance:
            self._llm_instance = ChatOpenAI(
                api_key=os.getenv("DASHSCOPE_API_KEY"),
                # DashScope's OpenAI-compatible base URL.
                base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
                model="qwen-plus",
            )
            self._memory = ConversationBufferMemory(
                return_messages=True,
                memory_key='history',
            )
            self._chain = ConversationChain(
                llm=self._llm_instance,
                memory=self._memory,
            )
        return self._llm_instance

    def invoke(self, user_input):
        """Run one conversational turn and return the model's reply text.

        Requires create_llm() to have been called first; otherwise
        self._chain is None and this raises AttributeError.
        """
        response = self._chain.invoke({'input': user_input})
        return response["response"]

    def get_chain(self):
        """Return the shared ConversationChain (None before create_llm())."""
        return self._chain

    def get_memory(self):
        """Return the shared ConversationBufferMemory (None before create_llm())."""
        return self._memory

#############################

# llm = Tongyi_llm()
# llm.create_llm('YOUR_DASHSCOPE_API_KEY')  # never commit a real API key
# chain = llm.get_chain()
#
# while True:
#     user_input = input('> ')
#     response = chain.invoke({'input': user_input})
#     print(response)
#     print(llm._memory.load_memory_variables({})['history'])
