import os
import logging

from SimpleRetriever import SimpleRetriever
from ESSearch import ESSearch
from typing import Optional, Union, Tuple, List, Any
from flag_models import FlagReranker
import torch


class DummyRetriever:
    """Placeholder retriever used when a recall route's data files are missing.

    Mimics the SimpleRetriever surface that ``search_qa`` relies on: the
    ``content``/``source`` attributes and a ``search()`` returning a
    FAISS-style (distances, indices) pair.
    """

    def __init__(self):
        # No backing data: search_qa() checks these against None.
        self.content = None
        # BUG FIX: search_qa() also reads `retriever.source`; without this
        # attribute the fallback path raised AttributeError.
        self.source = None

    def search(self, query: str, top_k: int):
        """Return an empty FAISS-style result.

        BUG FIX: the original returned a bare ``[]``, which crashed the
        caller's ``D, I_ = retriever.search(...)`` tuple unpacking. Return
        (distances, indices-with-one-empty-row) so downstream slicing of
        ``I_[0]`` yields no hits instead of raising.
        """
        return [], [[]]


class QueryProcessor:
    """Multi-route RAG retrieval pipeline.

    Combines three FAISS dense-recall routes (query-question, query-answer,
    query-chunk), Elasticsearch keyword recall, and a cross-encoder
    re-ranker (FlagReranker) that orders recalled candidates.
    """

    def __init__(
        self,
        faiss_database_path: str,
        es_host: str,
        es_username: str,
        es_password: str,
        es_index: str,
        reranker_path: str,
        emb_model_path: str,
        device: str,
    ):
        """Connect to Elasticsearch, load the re-ranker, and build one dense
        retriever per recall route.

        Routes whose FAISS index or chunk data is missing on disk fall back
        to DummyRetriever so lookups degrade to empty results instead of
        crashing.
        """
        logging.info("Connect to Elastic Service...")
        self.es = ESSearch(es_host, es_username, es_password)
        self.es_index = es_index

        self.reranker = FlagReranker(
            model_name_or_path=reranker_path, use_fp16=True, device=device
        )
        logging.info("Initialzing Retrievers...")
        # Route subfolders: "0" = q-q, "1" = q-a, "2" = q-document.
        name_list = ["0", "1", "2"]
        # Per-route FAISS index and chunk-data paths.
        index_list = [
            os.path.join(faiss_database_path, name, "faiss.index") for name in name_list
        ]
        data_list = [
            os.path.join(faiss_database_path, name, "chunk.pkl") for name in name_list
        ]

        # One retriever per recall route, kept in route order (q-q, q-a, q-c).
        self.retriever_list: List[Union[SimpleRetriever, DummyRetriever]] = []
        for index_path, folder_path in zip(index_list, data_list):
            if os.path.exists(index_path) and os.path.exists(folder_path):
                self.retriever_list.append(
                    SimpleRetriever(index_path, folder_path, emb_model_path, device)
                )
            else:
                logging.warning(
                    f"Missing retriever data for {index_path} or {folder_path}. Using DummyRetriever."
                )
                self.retriever_list.append(DummyRetriever())

    def deduplicate_with_source(self, answers_sumup, answer_source):
        """Drop duplicate answers, keeping the first occurrence and its
        paired source so the two lists stay aligned.

        :param answers_sumup: recalled answer strings (hashable items)
        :param answer_source: sources parallel to ``answers_sumup``
        :return: (deduplicated answers, matching sources)
        """
        seen = set()  # answers already emitted
        deduped_answers = []
        deduped_sources = []

        for answer, source in zip(answers_sumup, answer_source):
            if answer not in seen:
                seen.add(answer)
                deduped_answers.append(answer)
                deduped_sources.append(source)

        return deduped_answers, deduped_sources

    def get_content_and_source(self, data):
        """Normalize a recall result into a (content, source) pair.

        ``search_multi`` returns the q-c route either as a flat content list
        or as a [content_list, source_list] pair; accept both shapes.
        BUG FIX: also require len(data) >= 2 before indexing data[1], so a
        single nested list no longer raises IndexError.
        """
        if data and any(isinstance(item, list) for item in data) and len(data) >= 2:
            return data[0], data[1]
        return data, []

    def muti_linkup(
        self, query_list: List[str], top_k: int = 5, modes: List[str] = None
    ):
        """Run dense (q-c) recall plus Elasticsearch recall for each query.

        NOTE: only the results of the LAST query in ``query_list`` survive
        the loop; the per-query list-of-lists accumulators are returned but
        never filled (kept for interface compatibility with callers).

        BUG FIXES: ``modes=None`` no longer raises TypeError on the ``in``
        test, and an empty ``query_list`` no longer leaves ``e_s_result`` /
        ``es_source`` unbound at the return statement.
        """
        q_q_list_list: List[List[str]] = []
        q_a_list_list: List[List[str]] = []
        q_c_list: List[str] = []
        q_c_source: List[str] = []
        # Pre-initialize so an empty query_list cannot cause NameError below.
        e_s_result: List[str] = []
        es_source: List[str] = []
        if modes is None:
            modes = []
        for q in query_list:
            if "q-c" in modes:
                q_q_list, q_a_list, q_c_list = self.search_multi(q, top_k, modes=modes)
                # The q-c route may come back as [content, source]; split the
                # pair, then deduplicate while keeping the lists aligned.
                q_c_list, q_c_source = self.get_content_and_source(q_c_list)
                q_c_list, q_c_source = self.deduplicate_with_source(q_c_list, q_c_source)

            # Keyword recall via Elasticsearch.
            e_s_result, es_source = self.es.query(q, self.es_index)

        return (
            q_q_list_list,
            q_a_list_list,
            q_c_list,
            q_c_source,
            e_s_result,
            es_source,
        )

    # Multi-route recall.
    def search_multi(
        self, query: str, top_k: int = 5, modes: List[str] = None
    ) -> Tuple[List, List, List]:
        """Dense recall for each requested mode, ordered by rerank score.

        Returns the (q-q, q-a, q-c) results; routes not requested come back
        as empty lists, and ``modes=None`` short-circuits to three empties.
        """
        results = {"q-q": [], "q-a": [], "q-c": []}
        if modes is None:
            return [], [], []
        # Map each mode onto its slot in self.retriever_list.
        mode_to_index = {"q-q": 0, "q-a": 1, "q-c": 2}
        for mode in modes:
            if mode not in mode_to_index:
                logging.error(f"mode {mode} not supported")
                continue
            retriever = self.retriever_list[mode_to_index[mode]]
            qac_recall_list, source_list = self.search_qa(query, retriever, top_k)
            if source_list:
                # BUG FIX: the original sorted with key=self.reRank(query, [x[0]]);
                # reRank on a single-element list returns that list unchanged, so
                # results were effectively sorted alphabetically by content while
                # running the reranker once per comparison. Score each candidate
                # once and sort by score, best first.
                scores = self.get_rerank_score_2(
                    self.rearange_data(qac_recall_list, query)
                )
                ranked = sorted(
                    zip(qac_recall_list, source_list, scores),
                    key=lambda triple: triple[2],
                    reverse=True,
                )
                results[mode] = [
                    [content for content, _src, _s in ranked],
                    [src for _c, src, _s in ranked],
                ]
            else:
                results[mode] = self.reRank(query, qac_recall_list)
        return results["q-q"], results["q-a"], results["q-c"]

    def search_qa(self, query: str, retriever, top_k) -> Tuple[List, List]:
        """Run one dense retriever and map FAISS hit ids to content/source.

        Returns ([], []) when the retriever yields no hits. Tolerates
        DummyRetriever-style empty results.
        """
        result = retriever.search(query, top_k=top_k)
        if not result:
            return [], []
        _distances, hit_matrix = result
        # BUG FIX: the original tested `I_ is not []`, which is always True
        # (identity against a fresh list object); check emptiness instead.
        if hit_matrix is None or len(hit_matrix) == 0:
            return [], []
        hit_ids = list(hit_matrix[0][:top_k])

        # DummyRetriever keeps these as None; real retrievers expose lists.
        content = getattr(retriever, "content", None)
        source = getattr(retriever, "source", None)
        recalled_data = [content[x] for x in hit_ids] if content is not None else []
        source_data = [source[x] for x in hit_ids] if source is not None else []
        return recalled_data, source_data

    def rearangeData(self, candidates, query: str) -> List[List[str]]:
        """Pair *query* with each candidate as ``[query, candidate]`` lists.

        Legacy twin of :meth:`rearange_data` (which yields tuples); kept for
        backward compatibility with external callers.
        """
        return [[query, candidate] for candidate in candidates]

    def rearange_data(self, candidates: List[str], query: str):
        """Pair *query* with each candidate as ``(query, candidate)`` tuples."""
        return [(query, candidate) for candidate in candidates]

    def get_rerank_score_2(self, arranged_list):
        """Score each (query, candidate) pair with the cross-encoder.

        NOTE: this method used to be defined twice with identical bodies;
        the duplicate definition has been removed.
        """
        all_rerank_score: List[float] = []
        for pair in arranged_list:
            all_rerank_score.append(self.reranker.compute_score(pair))
        return all_rerank_score

    def getReraned(self, answers, all_rerank_score):
        """Sort *answers* by their rerank scores, highest score first."""
        try:
            combined = list(zip(answers, all_rerank_score))
        except Exception:
            # BUG FIX: the original called logging.error(answers, scores) —
            # which treats the second argument as a %-format arg — and then
            # exit(), killing the whole process. Log properly and re-raise.
            logging.error(
                "Failed to pair answers with scores: %r / %r",
                answers,
                all_rerank_score,
            )
            raise
        sorted_combined = sorted(combined, key=lambda x: x[1], reverse=True)
        return [answer for answer, _score in sorted_combined]

    def reRank(self, query: str, qac_list: List[str]):
        """Re-rank ``qac_list`` for *query* with the cross-encoder; best first."""
        arranged_list = self.rearange_data(qac_list, query)
        all_rerank_score = self.get_rerank_score_2(arranged_list)
        logging.info("got rerank score")
        return self.getReraned(qac_list, all_rerank_score)


import yaml
import openai

# Route requests through a proxy endpoint speaking the OpenAI chat API.
openai.api_base = "https://api.gpts.vin/v1/chat"
# SECURITY FIX: never hard-code API keys in source control. Prefer the
# OPENAI_API_KEY environment variable; the (already-redacted) literal below
# is kept only as a legacy fallback so existing deployments keep working.
openai.api_key = os.environ.get(
    "OPENAI_API_KEY", "sk-LRfUL9PZUBYAcvbME99c2a7457284217Af6f9fA88685****"
)
def llm_chat(query: str, context: str):
    """Answer *query* with GPT-4o grounded on the retrieved *context*.

    :param query: the user's question
    :param context: retrieved passages to ground the answer on
    :return: the model's reply, or a fixed Chinese apology string when the
        API call fails for any reason.
    """
    Prompt = f"""你是一个熟悉mindspores的专业AI助手，下面是用户的问题和上下文信息，请根据上下文信息回答用户的问题。
    用户问题：{query},
    上下文信息：{context}
    请给出简洁明了的回答，尽量使用中文。
    """
    try:
        # BUG FIX: the original called openai.Completion.create with a
        # `messages` kwarg; chat models require ChatCompletion.create in the
        # pre-1.0 SDK, so every call raised and fell through to the fallback.
        response = openai.ChatCompletion.create(
            model="gpt-4o",
            messages=[
                {"role": "user", "content": Prompt},
            ],
            temperature=0.7,
            max_tokens=4096,
        )
        model_output = response.choices[0].message.content
    except Exception as e:
        # Best-effort: surface the error but keep the pipeline alive.
        print("Error:", e)
        model_output = "抱歉，我无法处理您的请求。"
    print("model_output:", model_output)
    return model_output


import json

def QP():
    """Build a QueryProcessor from config.yaml plus hard-coded model paths.

    Reads the ES connection settings from /root/data/mindqa/config.yaml and
    pins the process to GPU 5 via CUDA_VISIBLE_DEVICES before any CUDA use.

    :return: a ready-to-query QueryProcessor instance.
    """
    # Skip HTTPS certificate warnings from the ES client.
    import urllib3

    os.environ["CUDA_VISIBLE_DEVICES"] = "5"
    # FIX: close the config file deterministically (the original leaked the
    # handle via yaml.safe_load(open(...))) and read it as UTF-8.
    with open("/root/data/mindqa/config.yaml", encoding="utf-8") as config_file:
        configs = yaml.safe_load(config_file)
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    # Build the QueryProcessor; note CUDA_VISIBLE_DEVICES=5 above maps the
    # visible "cuda:0" onto physical GPU 5.
    query_processor = QueryProcessor(
        faiss_database_path="/root/data/mindqa/SimpleRAG/data/results",
        es_host=configs.get("ES_HOST"),
        es_username=configs.get("ES_USER"),
        es_password=configs.get("ES_PASSWORD"),
        es_index=configs.get("ES_INDEX"),
        reranker_path="/root/data/SimpleRAG_project/models/bge-rerank-large",
        emb_model_path="/root/data/SimpleRAG_project/models/mixed_model_1",
        device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu"),
    )
    return query_processor

if __name__ == "__main__":
    # Smoke test: one Chinese query through the q-c recall route.
    processor = QP()
    queries = ["什么是mindspore？"]
    (
        q_q,
        q_a,
        q_c,
        q_c_source,
        e_s_result,
        es_source,
    ) = processor.muti_linkup(queries, top_k=4, modes=["q-c"])
    print(q_c, q_c_source, e_s_result, es_source)
