from langchain.chains import RetrievalQA
from langchain.prompts.chat import (
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
)
from langchain.embeddings.huggingface import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS
from langchain.document_loaders import UnstructuredFileLoader
from langchain.llms.base import LLM
from typing import Optional, List


# NOTE: no module-level configuration here; each class receives its settings via constructor arguments.

class GetKnowledgePrompt:
    """Retrieval-augmented QA over a knowledge file.

    Loads the document at ``knowledge_path``, embeds it with a HuggingFace
    model into a FAISS index, and answers queries through a ``RetrievalQA``
    chain backed by ``MyLLM``.
    """

    def __init__(self, knowledge_path, search_model_name):
        # Name of the HuggingFace embedding model used for similarity search.
        self.model_name = search_model_name
        # Path to the knowledge document to index.
        self.knowledge_path = knowledge_path
        self.llm_model = MyLLM()

    def init_knowledge_vector_store(self):
        """Load the knowledge file and build a FAISS vector store over it.

        Returns:
            FAISS: vector store containing the embedded document elements.
        """
        embeddings = HuggingFaceEmbeddings(model_name=self.model_name)
        # mode="elements" splits the document into fine-grained elements
        # (titles, paragraphs, ...) so retrieval works per element.
        loader = UnstructuredFileLoader(self.knowledge_path, mode="elements")
        docs = loader.load()
        return FAISS.from_documents(docs, embeddings)

    def get_knowledge_based_answer(self, query, chat_history=None):
        """Answer ``query`` using the top-6 retrieved knowledge chunks.

        Args:
            query: the user's question.
            chat_history: optional prior conversation turns; currently not
                consumed by the chain, kept for interface compatibility.

        Returns:
            dict: the RetrievalQA result mapping for ``{"query": query}``.
        """
        # Fix: never use a mutable [] default — it is shared across calls.
        chat_history = [] if chat_history is None else chat_history
        system_template = """记住你叫小易是一个党建知识助手，结合上下文并基于以下内容，简洁和专业的来回答用户的问题。
        如果无法从中得到答案，请说 "不知道" 或 "没有足够的相关信息"，不要试图编造答案。答案请使用中文,回答时请使用正常的格式。
        ----------------
        {context}
        ----------------
        """
        messages = [
            SystemMessagePromptTemplate.from_template(system_template),
            HumanMessagePromptTemplate.from_template("{question}"),
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        # NOTE(review): the vector store is rebuilt (file re-read and
        # re-embedded) on every call — acceptable if the file changes between
        # calls, otherwise a candidate for caching.
        vector_store = self.init_knowledge_vector_store()

        knowledge_chain = RetrievalQA.from_llm(
            llm=self.llm_model,
            retriever=vector_store.as_retriever(search_kwargs={"k": 6}),
            prompt=prompt,
        )
        knowledge_chain.return_source_documents = False

        result_prompt = knowledge_chain({"query": query})
        print(result_prompt)
        return result_prompt


class GetPromptByText:
    """Retrieval-augmented QA over an in-memory text blob.

    Splits the supplied text into lines, embeds them with a HuggingFace
    model into a FAISS index, and answers queries through a ``RetrievalQA``
    chain backed by ``MyLLM``.
    """

    def __init__(self, search_model_name):
        # Name of the HuggingFace embedding model used for similarity search.
        self.model_name = search_model_name
        self.llm_model = MyLLM()
        # Embeddings are built once per instance and reused across queries.
        self.embeddings = HuggingFaceEmbeddings(model_name=self.model_name)

    def init_knowledge_vector_store(self, text):
        """Build a FAISS vector store from ``text``, one chunk per line.

        Args:
            text: raw text; split on newlines into retrieval units.

        Returns:
            FAISS: vector store over the non-empty lines of ``text``.
        """
        # Split on newlines only (the old comment claimed "句号 or \n" but
        # only \n was ever used). Fix: drop blank lines so we do not embed
        # empty strings as degenerate documents.
        texts = [line for line in text.split('\n') if line.strip()]
        return FAISS.from_texts(texts, self.embeddings)

    def get_knowledge_based_answer(self, text, query, chat_history=None):
        """Answer ``query`` using the top-6 chunks retrieved from ``text``.

        Args:
            text: the knowledge text to index for this call.
            query: the user's question.
            chat_history: optional prior conversation turns; currently not
                consumed by the chain, kept for interface compatibility.

        Returns:
            dict: the RetrievalQA result mapping for ``{"query": query}``.
        """
        # Fix: never use a mutable [] default — it is shared across calls.
        chat_history = [] if chat_history is None else chat_history
        system_template = """记住你叫小易是一个党建知识助手，结合上下文并基于以下内容，简洁和专业的来回答用户的问题。
        如果无法从中得到答案，请说 "不知道" 或 "没有足够的相关信息"，不要试图编造答案。答案请使用中文,回答时请使用正常的格式。
        ----------------
        {context}
        ----------------
        """
        messages = [
            SystemMessagePromptTemplate.from_template(system_template),
            HumanMessagePromptTemplate.from_template("{question}"),
        ]
        prompt = ChatPromptTemplate.from_messages(messages)
        vector_store = self.init_knowledge_vector_store(text)

        knowledge_chain = RetrievalQA.from_llm(
            llm=self.llm_model,
            retriever=vector_store.as_retriever(search_kwargs={"k": 6}),
            prompt=prompt,
        )
        knowledge_chain.return_source_documents = False

        result_prompt = knowledge_chain({"query": query})
        return result_prompt


class MyLLM(LLM):
    """Identity LLM stub: returns the rendered prompt as the "answer".

    Plugging this into a chain makes the chain's result the fully rendered
    prompt itself, which is how the classes above extract their prompts.
    """

    @property
    def _llm_type(self) -> str:
        # Identifier reported to langchain for this LLM implementation.
        return "ChatG"

    def _call(self, prompt: str, stop: Optional[List[str]] = None) -> str:
        """Echo ``prompt`` back unchanged; ``stop`` is ignored."""
        # Fix: removed the bare no-op expression statement `prompt` that
        # preceded the return — it evaluated the name and discarded it.
        return prompt
