from operator import itemgetter
from typing import List

import dotenv
from langchain.retrievers import MultiQueryRetriever
from langchain_community.chat_models.moonshot import MoonshotChat
from langchain_community.embeddings.baidu_qianfan_endpoint import QianfanEmbeddingsEndpoint
from langchain_community.vectorstores.faiss import FAISS
from langchain_core.callbacks import CallbackManagerForRetrieverRun
from langchain_core.documents import Document
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import PromptTemplate

# Load API credentials (Moonshot / Qianfan keys) from a local .env file.
dotenv.load_dotenv()

# Chat model used both for multi-query generation and for answering.
llm = MoonshotChat(model="moonshot-v1-8k")


class CustomMulitQuery(MultiQueryRetriever):
    """MultiQueryRetriever variant that answers each LLM-generated query in
    turn, feeding the Q/A pairs accumulated so far back into the next prompt.

    Unlike the base class, ``retrieve_documents`` and ``unique_union`` are
    overridden to pass through the final answer *string*, so ``invoke``
    yields an answer rather than a list of Documents.
    """

    def format_qa(self, q: str, a: str) -> str:
        """Render one question/answer pair for inclusion in the prompt."""
        return f"Question: {q}\nAnswer: {a}\n"

    def retrieve_format(self, lis: List[Document]) -> str:
        """Concatenate document contents, one per line (empty list -> "")."""
        # str.join avoids the quadratic cost of repeated ``+=`` concatenation.
        return "".join(f"{item.page_content}\n" for item in lis)

    def retrieve_documents(
            self, queries: List[str], run_manager: CallbackManagerForRetrieverRun
    ) -> str:
        """Answer every generated query, chaining earlier Q/A pairs as context.

        Args:
            queries: LLM-generated reformulations of the user question.
            run_manager: callback manager used for the retriever child runs.

        Returns:
            The answer produced for the last query ("" if queries is empty).
        """
        prompt_str = """
                请根据以下信息回答用户的问题
                ----------这是问答对------------
                {qa}
                -----------这是背景信息----------
                {context}
                用户的问题: {question}
                """
        # Prompt and chain are loop-invariant: build them once, not per query.
        # The former itemgetter mapping was an identity re-map of the invoke
        # dict and has been dropped; the prompt consumes the dict directly.
        prompt = PromptTemplate.from_template(prompt_str)
        chain = prompt | llm | StrOutputParser()

        documents: List[Document] = []  # grows across queries: context accumulates
        answer = ""
        qa_history = ""  # formatted Q/A pairs from earlier iterations
        for query in queries:
            docs = self.retriever.invoke(
                query, config={"callbacks": run_manager.get_child()}
            )
            documents.extend(docs)
            answer = chain.invoke(
                {
                    "context": self.retrieve_format(documents),
                    "question": query,
                    "qa": qa_history,
                }
            )
            qa_history += self.format_qa(query, answer)
        return answer

    def unique_union(self, doc_str: str) -> str:
        """Pass-through: the 'documents' here are already the final answer."""
        return doc_str


# Embedding model used to encode queries against the local FAISS index.
emb = QianfanEmbeddingsEndpoint()
# NOTE(review): allow_dangerous_deserialization enables pickle loading of the
# index — only acceptable because ./file/ is a locally created, trusted index.
db = FAISS.load_local("./file/", embeddings=emb, allow_dangerous_deserialization=True)
custom_re = CustomMulitQuery.from_llm(
    llm=llm,
    retriever=db.as_retriever(search_type="mmr")
)
# Because retrieve_documents/unique_union are overridden above, invoke returns
# the final answer string rather than a list of Documents.
print(custom_re.invoke("笨笨是谁呢？"))
