import networkx as nx
import numpy as np
import pandas as pd
import random
import time
import os
import pingouin as pg
import json
from visual import plot_networkx
from structure_graph import generate_structure_graphs
import argparse
import logging


class CauseLocator:
    """Root-cause localization over anomaly structure graphs.

    For each anomaly window a causal structure graph is built (via
    ``generate_structure_graphs``); a (first- or second-order) random walk
    driven by anomaly scores and partial correlations ranks candidate
    root-cause metrics, and the shortest anomaly-weighted path from the
    top candidates to the front-end metric is reported.
    """

    def __init__(self, config) -> None:
        """Store configuration, prepare the output directory and build graphs.

        Args:
            config: dict loaded from the JSON config file; all runtime
                parameters live under ``config["args"]``.
        """
        self._config = config
        self._data_dir = self._config.get("args").get("data_dir")
        logging.info(f"===================== dataset: {self._data_dir} =====================")
        self._filename = os.path.basename(self._data_dir)
        self._output_dir = self._config.get("args").get("output_dir")
        if self._output_dir is None:
            # Default output location: <data_dir>/../../output/<dataset_name>
            self._output_dir = os.path.join(os.path.dirname(os.path.dirname(self._data_dir)),
                                            "output",
                                            self._filename)
        # exist_ok avoids the check-then-create race of the original code.
        os.makedirs(self._output_dir, exist_ok=True)

        # Build the causal structure graphs (one per anomaly window).
        self._graph_list = generate_structure_graphs(self._config)

    def remove_self(self, list_a, a):
        """Return a list copy of ``list_a`` with the first occurrence of ``a`` removed.

        The input iterable is never mutated; a new list is always returned.
        """
        result = list(list_a)
        if a in result:
            result.remove(a)
        return result

    def generate_df(self, graph):
        """Collect every node's metric time series into one DataFrame.

        Each graph node is expected to carry a ``"timelist"`` attribute
        (the metric values over the window); columns are node names.
        """
        return pd.DataFrame({node: graph.nodes.get(node).get("timelist")
                             for node in graph.nodes()})

    def partial_corr_c(self, graph, v_j, v_fe, df, corr_type):
        """Absolute partial correlation between candidate ``v_j`` and front-end ``v_fe``.

        The covariate set is the union of both nodes' parents (self-loops
        and the two variables themselves excluded). Constant series are
        handled explicitly since correlation is undefined for them.

        Returns:
            float in [0, 1]; 1.0 when ``v_j`` is the front-end itself,
            0.0 when either series is constant.
        """
        if v_j == v_fe:
            return 1.

        parent_v_j = self.remove_self(graph.pred.get(v_j), v_j)
        parent_v_fe = self.remove_self(graph.pred.get(v_fe), v_fe)

        # Confounders: union of parents, minus the two variables themselves.
        confounder = list(set(parent_v_j) | set(parent_v_fe))
        if v_fe in confounder:
            confounder.remove(v_fe)
        if v_j in confounder:
            confounder.remove(v_j)

        # A constant series has zero variance -> correlation undefined.
        if len(df[v_fe].unique()) == 1:
            return 0.
        if len(df[v_j].unique()) == 1:
            return 0.
        # BUGFIX: the original removed items from `confounder` while
        # iterating it, which skips elements; filter into a new list instead.
        confounder = [c for c in confounder if len(df[c].unique()) > 1]

        return abs(pg.partial_corr(data=df, x=v_fe, y=v_j, covar=confounder,
                                   method=corr_type)['r'].values[0])

    def transfer_prob(self, graph, nodes: tuple, df, corr_type, corr_prop):
        """Jump probability to ``v_j``, mixing anomaly score and partial correlation.

        Args:
            nodes: (v_i, v_j, v_fe) = current node, candidate next node,
                front-end metric.
            corr_prop: weight of the correlation term (anomaly term gets
                ``1 - corr_prop``).
        """
        v_i, v_j, v_fe = nodes[0], nodes[1], nodes[2]
        v_i_score = graph.nodes.get(v_i).get("anomaly_score")
        v_j_score = graph.nodes.get(v_j).get("anomaly_score")
        score_sum = v_i_score + v_j_score
        transfer_anomaly = v_j_score / score_sum if score_sum != 0 else 0
        transfer_corr = self.partial_corr_c(graph, v_j, v_fe, df, corr_type)
        return transfer_anomaly * (1 - corr_prop) + transfer_corr * corr_prop

    def probablity_matrix(self, graph, v_fe):
        """Build the transfer-probability matrix for the random walk.

        Column ``i`` holds the (normalized) probabilities of jumping from
        node ``i`` to every other node: forward edges damped by ``r``,
        backward edges at full weight, plus a self-loop when staying beats
        every outgoing option.

        NOTE: the method name keeps its historical typo ("probablity")
        because it is part of the public interface.
        """
        args = self._config.get("args")
        r = args.get("r")
        remove_kpi = bool(args.get("remove_kpi"))
        corr_type = args.get("corr_type")
        corr_prop = args.get("corr_prop")

        n = len(graph.nodes())
        df = self.generate_df(graph)
        transfer_mat = pd.DataFrame(np.zeros((n, n), dtype=np.float64),
                                    index=graph.nodes(), columns=graph.nodes())
        for i in graph.nodes():
            # Forward jumps (to successors) are damped by r.
            # .loc replaces the original chained assignment
            # (transfer_mat[i][j] = ...), which is unreliable/no-op under
            # pandas copy-on-write.
            successor = self.remove_self(graph.succ.get(i), i)
            for j in successor:
                transfer_mat.loc[j, i] = r * self.transfer_prob(
                    graph, (i, j, v_fe), df, corr_type, corr_prop)

            # Backward jumps (to parents); optionally never jump back to the KPI.
            if remove_kpi:
                parents_i = self.remove_self(self.remove_self(graph.pred.get(i), i), v_fe)
            else:
                parents_i = self.remove_self(graph.pred.get(i), i)
            for j in parents_i:
                transfer_mat.loc[j, i] = self.transfer_prob(
                    graph, (i, j, v_fe), df, corr_type, corr_prop)

            # Self-loop only if staying is more attractive than any jump.
            c_self = self.transfer_prob(graph, (i, i, v_fe), df, corr_type, corr_prop)
            col_max = transfer_mat[i].max()  # hoisted: computed once, used twice
            if c_self > col_max:
                transfer_mat.loc[i, i] = c_self - col_max

            # Normalize column i to a probability distribution
            # (isolated nodes keep an all-zero column).
            s = transfer_mat[i].sum()
            if s == 0:
                continue
            for j in set(parents_i + successor):
                transfer_mat.loc[j, i] = transfer_mat.loc[j, i] / s
            transfer_mat.loc[i, i] = transfer_mat.loc[i, i] / s
        return transfer_mat

    def random_pick(self, some_list, probabilities):
        """Sample one item from ``some_list`` according to ``probabilities``.

        Falls through to the last item when rounding leaves the cumulative
        sum slightly below the drawn value.
        """
        x = random.uniform(0, 1)
        cumulative_probability = 0.0
        for item, item_probability in zip(some_list, probabilities):
            cumulative_probability += item_probability
            if x < cumulative_probability:
                break
        return item

    def random_walk(self, graph, transfer_mat, num_loop, v_fe):
        """First-order random walk.

        Starts at the front-end metric and jumps ``num_loop`` times using
        the columns of ``transfer_mat``; returns (node, visit_count) pairs
        sorted by count descending.
        """
        v_cur = v_fe
        visit_count = dict.fromkeys(graph.nodes(), 0)

        for _ in range(num_loop):
            v_cur = self.random_pick(transfer_mat.index.tolist(),
                                     transfer_mat[v_cur].values)
            visit_count[v_cur] += 1

        return sorted(visit_count.items(), key=lambda x: x[1], reverse=True)

    def random_walk_second_order(self, graph, transfer_mat, num_loop, beta, v_fe):
        """Second-order random walk: the next step depends on (previous, current).

        ``beta`` weighs the first-order transfer probability against the
        probability that led into the current node (beta == 1 degenerates
        to the first-order walk).
        """
        v_cur = v_fe
        v_pre = v_fe
        visit_count = dict.fromkeys(graph.nodes(), 0)

        for _ in range(num_loop):
            # Probability of the step that brought us here (prev -> cur).
            p_to_c = transfer_mat.loc[v_cur, v_pre]

            # BUGFIX: rebuild the candidate distribution on every step.
            # The original reused one dict across iterations, so stale
            # entries from earlier steps survived and the normalizing sum
            # excluded them -> probabilities did not sum to 1.
            # (.items() replaces Series.iteritems(), removed in pandas 2.0.)
            dynamic_transfer = {
                key: (1 - beta) * p_to_c + beta * value
                for key, value in transfer_mat[v_cur].items()
                if value > 0
            }
            total = sum(dynamic_transfer.values())
            if total == 0:
                # Dead end: jump uniformly to any node of the graph.
                n_nodes = len(graph.nodes())
                dynamic_transfer = dict.fromkeys(graph.nodes(), 1 / n_nodes)
            else:
                dynamic_transfer = {k: v / total for k, v in dynamic_transfer.items()}

            v_next = self.random_pick(list(dynamic_transfer.keys()),
                                      list(dynamic_transfer.values()))
            visit_count[v_next] += 1

            v_pre, v_cur = v_cur, v_next

        return sorted(visit_count.items(), key=lambda x: x[1], reverse=True)

    def infer_causal_path(self, ac_k, graph, location_result, front_end_metric,
                          output_dict, special_sli_metrics):
        """Infer the root-cause path for the top-``ac_k`` ranked metrics.

        For each candidate, an anomaly-weighted shortest path from the
        candidate to the front-end metric is searched on the undirected
        projection of ``graph`` and rendered to an HTML plot.

        Returns:
            (output_dict, error_count): the dict extended with
            ``top1..topK`` entries, and the number of no-path failures.
        """
        error_count = 0
        for top_i in range(ac_k):
            # Pick the root-cause metric; deliberately drop the front-end
            # metric and special SLI metrics from the ranking in place so
            # later top_i indices see the filtered list.
            try:
                while location_result[top_i][0] == front_end_metric \
                    or location_result[top_i][0].split("@")[0] in special_sli_metrics:
                    del location_result[top_i]
                root_cause_metric = location_result[top_i][0]
            except IndexError as e:
                logging.warning(f"IndexError: {e}")
                continue
            logging.info(f"""timestamp: {output_dict.get("TimeStamp")}, \
top{top_i+1}, root_cause_metric: {root_cause_metric}, score: {location_result[top_i][1]}""")

            # Re-weight edges: more anomalous endpoints -> shorter edge.
            # NOTE(review): raises ZeroDivisionError if both endpoint
            # anomaly scores are 0 — confirm scores are always positive.
            undir_g = nx.Graph(graph)
            for s, t in undir_g.edges():
                undir_g.edges[s, t].update(
                    {"weight": (2 / (graph.nodes.get(s).get("anomaly_score") +
                                     graph.nodes.get(t).get("anomaly_score")))})

            # Shortest path between root-cause candidate and front end.
            try:
                shortest_path = nx.dijkstra_path(
                    undir_g, root_cause_metric, front_end_metric)
                shortest_path_len = nx.dijkstra_path_length(
                    undir_g, root_cause_metric, front_end_metric)
                score = 1 / shortest_path_len
                # Render the causal graph with the path highlighted.
                visual_path = os.path.join(
                    self._output_dir,
                    str(output_dict.get("TimeStamp")) + "_top" + str(top_i + 1) + ".html")
                plot_networkx(graph=graph, save_path=visual_path,
                              front_end_metric=front_end_metric,
                              root_cause_path=shortest_path)
            except nx.exception.NetworkXNoPath as e:
                error_count += 1
                logging.warning(f"nx.exception.NetworkXNoPath: {e}")
                shortest_path = None
                score = None
                visual_path = None
            output_dict["top" + str(top_i + 1)] = {
                "root_cause": root_cause_metric,
                "root_cause_path": shortest_path,
                "score": score,
                "visual_path": visual_path
            }
        return output_dict, error_count

    def main(self):
        """Run the core root-cause localization pipeline.

        For every structure graph: build the transfer matrix, run the
        random walk, infer the causal path, and dump all results to
        ``<output_dir>/<dataset>.json``.
        """
        args = self._config.get("args")
        num_loop = args.get("num_loop")
        beta = args.get("beta")
        front_end_metric = args.get("front_end_metric")
        ac_k = args.get("ac_k")

        error_counts = 0
        outputs = []
        with open(os.path.join(self._data_dir, "result.json"), "r") as f:
            anomaly_results = json.load(f)

        for index, graph in enumerate(self._graph_list):
            output_dict = {}

            # 1. Compute transfer probabilities.
            transfer_matrix = self.probablity_matrix(graph, front_end_metric)

            # 2. Run the random walk (beta == 1 -> first order).
            if beta == 1:
                rw_result = self.random_walk(graph, transfer_matrix, num_loop,
                                             front_end_metric)
            else:
                rw_result = self.random_walk_second_order(
                    graph, transfer_matrix, num_loop, beta, front_end_metric)
            location_result = [(k, v / num_loop) for k, v in rw_result]

            output_dict["TimeStamp"] = anomaly_results[index].get("TimeStamp")
            output_dict["TimeStr"] = str(pd.to_datetime(
                anomaly_results[index].get("TimeStamp") // 1000, unit="s"))
            output_dict["random_walk_result"] = rw_result

            # 3. Infer the root-cause path for the top candidates.
            special_sli_metrics = graph.graph["special_sli_metrics"]
            output_dict, error_count = self.infer_causal_path(
                ac_k, graph, location_result,
                front_end_metric, output_dict, special_sli_metrics)
            outputs.append(output_dict)
            error_counts += error_count

        with open(os.path.join(self._output_dir, self._filename + ".json"), "w+") as outfile:
            json.dump(outputs, outfile, indent=4)
        logging.info(f"error_counts: {error_counts}")


if __name__ == "__main__":
    # CLI: only a single optional path to the JSON configuration file.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("-config", type=str, default="config/config.json")
    cli_args = arg_parser.parse_args()

    start_time = time.time()

    logging.basicConfig(format="%(asctime)s [%(levelname)s]: %(message)s",
                        filename="CauseLocator.log",
                        level=logging.INFO)

    # Load the configuration and run the locator end to end.
    with open(cli_args.config, "r") as config_file:
        loaded_config = json.load(config_file)
    CauseLocator(loaded_config).main()

    end_time = time.time()
    logging.info(f"run time is: {int(end_time - start_time)} s")
