import os
from typing import List, Dict, Tuple

from langchain import PromptTemplate
from langchain.chains.qa_with_sources import load_qa_with_sources_chain
from langchain.chat_models import ChatOpenAI
from langchain.schema import Document

from bot.openai_bot import OpenAIBot
from conf.config import config
from db.milvus.building_info_text_chunk_dao import BuildingInfoTextChunkDAO as BuildingInfoTextChunkMilvusDAO
from db.mysql.building_info_text_chunk_dao import BuildingInfoTextChunkDAO as BuildingInfoTextChunkMysqlDAO
from utils.constants import stuff_prompt_template_custom, map_reduce_question_prompt_template_custom, \
    map_reduce_combine_prompt_template_custom

# Route all OpenAI SDK traffic through the configured proxy: the openai/langchain
# clients read these environment variables at call time.
os.environ["OPENAI_API_BASE"] = config["openai"]["open_ai_proxy"]["api_base"]
os.environ["OPENAI_API_KEY"] = config["openai"]["open_ai_proxy"]["api_key"]


def get_relevant_documents(query: str, top_k: int) -> List[Document]:
    """
    Retrieve the text chunks most relevant to a query.

    The query is embedded via OpenAI, the nearest vectors are looked up in
    Milvus, and the matching text chunks are then loaded from MySQL.

    :param query: text to search for
    :param top_k: number of nearest neighbours to retrieve
    :return: list of Documents; each carries the chunk id (``source``), the
             source URL (``source_web``) and the vector ``distance`` in its
             metadata (``distance`` is None if Milvus returned an id that
             MySQL could not resolve)
    """
    # Embed the query text into a single vector.
    openai_bot = OpenAIBot()
    vector = openai_bot.embeddings(text_list=[query])[0]

    # Find the ids (and distances) of the top_k nearest vectors in Milvus.
    milvus_dao = BuildingInfoTextChunkMilvusDAO()
    ids = milvus_dao.get_ids(vector=vector, top_k=top_k)

    # Load the corresponding text chunks from MySQL.
    mysql_dao = BuildingInfoTextChunkMysqlDAO()
    text_chunks = mysql_dao.get_text_chunks(text_chunk_ids=[item.get("id") for item in ids])

    # Attach the vector distance to each chunk via an O(1) lookup table
    # instead of the previous O(n*m) nested scan.
    distance_by_id = {item.get("id"): item.get("distance") for item in ids}
    for chunk in text_chunks:
        chunk["distance"] = distance_by_id.get(chunk.get("text_chunk_id"))

    return [
        Document(
            page_content=chunk.get("text_chunk"),
            metadata={
                "source": chunk.get("text_chunk_id"),
                "source_web": chunk.get("source_web"),
                "distance": chunk.get("distance"),
            },
        )
        for chunk in text_chunks
    ]


def get_answer(question: str, top_k: int, chain_type: str) -> Tuple[str, Dict]:
    """
    Answer a question using retrieved documents and a LangChain QA chain.

    :param question: the question to answer
    :param top_k: number of relevant documents to retrieve for context
    :param chain_type: LangChain QA-with-sources chain type; only "stuff"
                       and "map_reduce" are supported
    :return: tuple of (answer text, details dict whose ``relevant_docs`` is a
             list of [source id, distance, chunk text, source URL])
    :raises ValueError: if ``chain_type`` is neither "stuff" nor "map_reduce"
    """
    # Retrieve the supporting documents for the chain's context.
    docs = get_relevant_documents(query=question, top_k=top_k)

    # Build the QA chain with a custom prompt per chain type.
    if chain_type == "stuff":
        stuff_prompt = PromptTemplate(template=stuff_prompt_template_custom, input_variables=["summaries", "question"])
        chain = load_qa_with_sources_chain(
            llm=ChatOpenAI(temperature=0), chain_type=chain_type,
            prompt=stuff_prompt,
            verbose=True
        )
    elif chain_type == "map_reduce":
        map_reduce_question_prompt = PromptTemplate(template=map_reduce_question_prompt_template_custom,
                                                    input_variables=["context", "question"])
        map_reduce_combine_prompt = PromptTemplate(template=map_reduce_combine_prompt_template_custom,
                                                   input_variables=["summaries", "question"])
        chain = load_qa_with_sources_chain(
            # batch_size limits how many docs are summarised per LLM call in
            # the map step.
            llm=ChatOpenAI(temperature=0, model_kwargs={"batch_size": 5}), chain_type=chain_type,
            question_prompt=map_reduce_question_prompt, combine_prompt=map_reduce_combine_prompt,
            return_map_steps=True,
            verbose=True
        )
    else:
        # Message: "only stuff and map_reduce chain types are supported".
        raise ValueError("只支持stuff和map_reduce链类型")

    res = chain({"input_documents": docs, "question": question}, return_only_outputs=True)

    answer = res.get("output_text")

    # Expose the retrieved evidence alongside the answer for inspection.
    answer_details = {
        "relevant_docs": [[doc.metadata.get("source"), doc.metadata.get("distance"),
                           doc.page_content, doc.metadata.get("source_web")] for doc in docs]
    }

    return answer, answer_details


def main():
    """Ad-hoc smoke test: answer a sample question via the map_reduce chain."""
    question = "万科未来之光值得买吗？"
    res = get_answer(question=question, top_k=5, chain_type="map_reduce")
    print(f"{res}")


# Run the smoke test only when executed directly as a script.
if __name__ == '__main__':
    main()
