from langchain.chains.retrieval import create_retrieval_chain
from langchain.memory import ConversationBufferMemory
from langchain_community.document_loaders import TextLoader
from langchain_core.prompts import ChatPromptTemplate
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langchain_community.embeddings import DashScopeEmbeddings
from langchain_community.vectorstores import FAISS
from langchain.chains import ConversationalRetrievalChain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_openai import ChatOpenAI

import os

# NOTE(security): a live-looking API key is hard-coded in source — rotate this
# key and load it from the environment or a secrets manager instead of
# committing it. `setdefault` (rather than plain assignment) lets an
# externally supplied DASHSCOPE_API_KEY take precedence over the fallback.
os.environ.setdefault("DASHSCOPE_API_KEY", "sk-c44402d7a12c41299bb716af8d7e8bac")

# Load the Xiaomi knowledge-base text file and split it into small
# overlapping chunks suitable for embedding. The separator list includes
# Chinese full-width punctuation so splits land on sentence boundaries.
loader = TextLoader("../other/xiaomi.txt", encoding="utf-8")
splitter = RecursiveCharacterTextSplitter(
    chunk_size=100,
    chunk_overlap=20,
    separators=["\n\n", "\n", "。", "，", " ", ""],
)
chunks = splitter.split_documents(loader.load())

# Embed each chunk via DashScope and index the vectors in an in-memory
# FAISS store, then expose the store through the standard retriever API.
vector_store = FAISS.from_documents(
    chunks,
    DashScopeEmbeddings(model="text-embedding-v1"),
)
retriever = vector_store.as_retriever()

###################################

# response = retriever.invoke("电机")
#
# print(response)
#
# for content in response:
#     print(content)

###################################

# Chat model served through DashScope's OpenAI-compatible endpoint.
llm = ChatOpenAI(
    api_key=os.getenv("DASHSCOPE_API_KEY"),  # replace here if the env var is not configured
    base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",  # DashScope base_url
    model="qwen-plus"
)

# Conversation memory for the (currently commented-out) conversational chain.
# BUG FIX: the keyword is `return_messages` (plural). The original
# `return_message=True` is not a recognized field, so the flag never took
# effect and history would have been replayed as a single string rather than
# a list of chat messages.
memory = ConversationBufferMemory(
    return_messages=True,
    memory_key="chat_history",
    output_key="answer"
)

# chain = ConversationalRetrievalChain.from_llm(
#     llm=llm,
#     memory=memory,
#     retriever=retriever,
#     # chain_type="map_reduce"
# )
#
# response = chain.invoke({"question": "SU7是什么"})
#
# print(response['answer'])

############################

# System prompt for "stuff"-style RAG: the retrieved chunks are injected into
# the {context} placeholder; the user's question arrives as {input}.
# FIX: added the missing space after the comma in "answer, say" — the original
# prompt text read "answer,say".
system_prompt = (
    "You are an assistant for question-answering tasks. "
    "Use the following pieces of retrieved context to answer "
    "the question. If you don't know the answer, say that you "
    "don't know. Use three sentences maximum and keep the "
    "answer concise."
    "\n\n"
    "{context}"
)

prompt = ChatPromptTemplate.from_messages(
    [
        ("system", system_prompt),
        ("human", "{input}"),
    ]
)

# "Stuff" all retrieved documents into a single prompt for the LLM, then wire
# retrieval and generation together into one runnable chain.
stuff_chain = create_stuff_documents_chain(llm=llm, prompt=prompt)

chain = create_retrieval_chain(retriever, stuff_chain)

# Response is a dict; per the create_retrieval_chain contract it includes the
# generated "answer" alongside the original "input" and retrieved "context".
response = chain.invoke({"input": "SU7是什么"})

print(response)
