# Customer-service intelligent Q&A system backed by an enterprise knowledge base
from langchain.chains import ConversationalRetrievalChain
from langchain_community.llms import Tongyi
from db.Embeddings_DB import load_and_vectorize_texts
from settings import settings


def qa_agent(question, api_key, memory):
    """Answer a question against the enterprise knowledge base.

    Builds a Tongyi (DashScope) LLM, loads/vectorizes the corpus at
    ``settings.DATA_PATH``, and runs a conversational retrieval chain
    whose history is tracked by *memory*.

    Parameters
    ----------
    question : str
        The user's current question.
    api_key : str
        DashScope API key, used for both the LLM and the embeddings.
    memory :
        A LangChain memory object (e.g. ``ConversationBufferMemory`` with
        ``memory_key="chat_history"``) shared across turns.

    Returns
    -------
    dict
        The chain's output mapping; the generated reply is typically under
        the ``"answer"`` key.
    """
    model = Tongyi(dashscope_api_key=api_key)
    # NOTE(review): this re-loads and re-vectorizes the corpus on every
    # call — consider caching the vector store across invocations if
    # load_and_vectorize_texts does not cache internally (verify).
    db = load_and_vectorize_texts(settings.DATA_PATH, api_key)
    chain = ConversationalRetrievalChain.from_llm(
        llm=model,
        retriever=db.as_retriever(),
        memory=memory,
    )
    # Fix: do not pass an explicit empty "chat_history" — with a memory
    # object attached, the chain loads chat_history from memory itself.
    # Hard-coding [] here was redundant and misleadingly suggested each
    # turn ran with no conversation context.
    res = chain.invoke({"question": question})
    return res


# if __name__ == '__main__':
#     from langchain.memory import ConversationBufferMemory
#     from settings import settings
#     api_key = settings.DASH_API_KEY
#     memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
#     question = "辣鸡大学是哪两个学校合并来的？"
#     print(qa_agent(question, api_key, memory))