import json
import pandas as pd
import networkx as nx
from utils import date_to_timestamp, smooth_data
import os
import re
from statsmodels.tsa.stattools import grangercausalitytests
import logging


class StructureGraph:
    """
    Build a metric-level structure graph from an entity-level meta graph.

    Workflow: ``plug_meta_graph`` attaches the top-k most anomalous concrete
    metrics to each meta-graph entity, then ``create_structure_graph`` runs
    pairwise Granger causality tests along the meta-graph edges to produce a
    directed graph of causally related metrics (``self._structure_gragh`` —
    the original attribute spelling is kept because callers read it).
    """

    def __init__(self, special_sli_metrics: list, meta_gaph: nx.DiGraph, data: pd.DataFrame, data_score: dict, front_end_metric: str) -> None:
        # NOTE(review): the parameter name "meta_gaph" keeps the original
        # (typo'd) spelling for backward compatibility with keyword callers.
        self._meta_graph = meta_gaph
        self._data = data                      # metric time series, one column per metric
        self._data_score = data_score          # metric name -> anomaly score
        self._front_end_metric = front_end_metric
        self._special_sli_metrics = special_sli_metrics
        self._metric_list = None               # metrics ordered by descending anomaly score
        self._structure_gragh = None           # populated by create_structure_graph()

    def plug_meta_graph(self, topk: int = 10) -> None:
        """
        Instrument the meta graph: attach up to ``topk`` concrete metrics
        (ranked by descending anomaly score) to each entity node as the
        ``mapping_metrics`` node attribute.
        """
        # Rank metrics by anomaly score (highest first), keeping only those
        # actually present in the data frame.
        ranked = sorted(self._data_score.items(), key=lambda item: -item[1])
        self._metric_list = [col for col, _ in ranked if col in self._data.columns]

        # Map each meta-graph entity to its top-k metrics. The entity name is
        # embedded in the metric name as "gala_gopher_<entity>_...".
        pattern = re.compile(r"gala_gopher_([a-zA-Z]+)_")
        entities = set(self._meta_graph.nodes())
        entity_metrics_mapping = {entity: {"mapping_metrics": []}
                                  for entity in entities}
        for metric in self._metric_list:
            match = pattern.search(metric)
            if not match:
                continue
            entity = match[1]
            if entity not in entities:
                continue
            # Special metrics (e.g. network-speed related) are attached to
            # the "sli" node instead of their own entity.
            if metric.split("@")[0] in self._special_sli_metrics \
                    and len(entity_metrics_mapping["sli"]["mapping_metrics"]) < topk:
                entity_metrics_mapping["sli"]["mapping_metrics"].append(metric)
            elif len(entity_metrics_mapping[entity]["mapping_metrics"]) < topk:
                entity_metrics_mapping[entity]["mapping_metrics"].append(metric)
        # Guarantee the front-end metric is always present on the sli node.
        if self._front_end_metric not in entity_metrics_mapping["sli"]["mapping_metrics"]:
            entity_metrics_mapping["sli"]["mapping_metrics"].append(self._front_end_metric)
        nx.set_node_attributes(self._meta_graph, entity_metrics_mapping)
        logging.debug(f"元图的边: {self._meta_graph.edges()}")

    def create_structure_graph(self, maxlag: int = 2, p_threshold: float = 0.05) -> None:
        """
        Run pairwise Granger causality tests along the meta-graph edges and
        build the metric-level causal structure graph.

        An edge src_metric -> tar_metric is added when the src metric contains
        "disk" (always considered causal) or when at least one lag's ssr
        F-test p-value is significant (<= ``p_threshold``).

        NOTE(review): the original code contained a first significance loop
        whose result was unconditionally overwritten by ``mark = False`` —
        dead code, removed here; only the second (any-lag-significant) rule
        was ever effective.
        """
        self._structure_gragh = nx.DiGraph()
        for src_node, tar_node in self._meta_graph.edges():
            for src_metric in self._meta_graph.nodes[src_node]["mapping_metrics"]:
                for tar_metric in self._meta_graph.nodes[tar_node]["mapping_metrics"]:
                    # Granger test: does src_metric help predict tar_metric?
                    # statsmodels expects the "caused" series in column 0.
                    try:
                        test_result = grangercausalitytests(
                            self._data[[tar_metric, src_metric]],
                            maxlag=maxlag, verbose=False)
                    except Exception:
                        # Typically raised for constant / collinear series.
                        # logging.warn is a deprecated alias -> logging.warning.
                        logging.warning(f"granger fail: {[tar_metric, src_metric]}")
                        continue
                    if "disk" in src_metric:
                        causal = True
                    else:
                        # Significant at any tested lag (ssr F-test p-value).
                        causal = any(
                            result[0]["ssr_ftest"][1] <= p_threshold
                            for result in test_result.values())
                    if causal:
                        self._structure_gragh.add_edge(src_metric, tar_metric)

        # Attach the raw time series and anomaly score to every graph node.
        for node in self._structure_gragh.nodes():
            nx.set_node_attributes(self._structure_gragh, {
                                   node: {"timelist": self._data[node].tolist(),
                                          "anomaly_score": self._data_score.get(node, 0)}})
        # Keep only the weakly connected component containing the front-end
        # metric. Guard against the empty graph: is_weakly_connected raises
        # NetworkXPointlessConcept on a graph with no nodes.
        if self._structure_gragh.number_of_nodes() \
                and not nx.is_weakly_connected(self._structure_gragh):
            for component in nx.weakly_connected_components(self._structure_gragh):
                if self._front_end_metric in component:
                    self._structure_gragh = self._structure_gragh.subgraph(component)
                    break
        self._structure_gragh.graph["special_sli_metrics"] = self._special_sli_metrics


def create_meta_graph(meta_data: dict) -> nx.DiGraph:
    """
    Build the entity-level meta graph from an adjacency mapping.

    ``meta_data`` maps each node name to an iterable of its successor nodes.
    """
    graph = nx.DiGraph()
    # Register every key as a node, then wire up its outgoing edges.
    graph.add_nodes_from(meta_data)
    for source, targets in meta_data.items():
        graph.add_edges_from((source, target) for target in targets)
    return graph


def filter_metric_not_candidates(data_dir: str, metric: pd.DataFrame) -> pd.DataFrame:
    """
    Drop every metric column that is not on the candidate allow-list.

    The allow-list is read from ``metric_candidates.json`` in ``data_dir``;
    the "timestamp" column is always preserved.

    Args:
        data_dir: directory containing ``metric_candidates.json``.
        metric: metric data frame to filter.

    Returns:
        A new data frame containing only allow-listed columns.
    """
    with open(os.path.join(data_dir, "metric_candidates.json")) as f:
        metric_candidates = json.load(f)
    # Build the allow-set once: O(1) membership per column instead of
    # re-scanning (and re-concatenating) the candidate list for each column.
    allowed = set(metric_candidates) | {"timestamp"}
    metrics_to_eliminate = [col for col in metric.columns if col not in allowed]
    return metric.drop(columns=metrics_to_eliminate)


def generate_structure_graphs(config_dict: dict):
    """
    Entry point for other modules: build one causal structure graph per
    detected anomaly case.

    Args:
        config_dict: configuration with an "args" section (paths, time
            window, Granger-test parameters), plus "special_sli_metrics"
            and "meta_graph" sections.

    Returns:
        list: one ``nx.DiGraph`` structure graph per case in result.json.
    """
    args = config_dict.get("args")
    data_dir = args.get("data_dir")
    forward_extended_time = args.get("forward_extended_time")
    backward_extended_time = args.get("backward_extended_time")
    anomaly_topn = args.get("anomaly_topn")
    maxlag = args.get("maxlag")
    p_threshold = args.get("p_threshold")
    smooth_window = args.get("smooth_window")
    front_end_metric = args.get("front_end_metric")
    special_sli_metrics = config_dict.get("special_sli_metrics")

    # Build the entity-level meta graph.
    meta_graph = create_meta_graph(config_dict.get("meta_graph"))

    # Load the anomaly-detection results (one entry per fault case).
    with open(os.path.join(data_dir, "result.json")) as f:
        results = json.load(f)

    # Load the metric time series and normalize dates to timestamps.
    metric = pd.read_csv(os.path.join(data_dir, "metric.csv"))
    metric["timestamp"] = metric["timestamp"].apply(date_to_timestamp)
    # Keep only allow-listed candidate metrics.
    metric = filter_metric_not_candidates(data_dir, metric)
    # Smooth the data and fill missing values.
    metric = smooth_data(df=metric, window=smooth_window)

    structure_graphs = []
    for result in results:
        logging.info(
            f"""当前故障case的开始时间为: {result.get("TimeStamp")}, \
utc时间字符串为: {pd.to_datetime(result.get("TimeStamp") // 1000, unit='s')}""")
        # Slice the time window around this fault case (TimeStamp is in ms).
        start_time = result.get("TimeStamp") // 1000 - forward_extended_time
        end_time = result.get("TimeStamp") // 1000 + backward_extended_time
        in_window = (metric["timestamp"] >= start_time) \
            & (metric["timestamp"] < end_time)
        # .drop() returns a new frame; the original `del now_metric[...]`
        # mutated a slice/view of `metric` (SettingWithCopy hazard).
        now_metric = metric[in_window].drop(columns=["timestamp"])

        # Drop metrics that stay constant inside the window — they carry no
        # causal signal and make the Granger test fail.
        constant_cols = [col for col in now_metric.columns
                         if len(now_metric[col].unique()) == 1]
        now_metric = now_metric.drop(columns=constant_cols)

        # Per-metric anomaly scores for this case.
        metric_top = {item.get("metric"): item.get("score")
                      for item in result.get("Resource").get("cause_metrics")}

        structure_gragh = StructureGraph(
            special_sli_metrics, meta_graph, now_metric, metric_top, front_end_metric)

        # Instrument the meta graph with the top-n anomalous metrics.
        structure_gragh.plug_meta_graph(topk=anomaly_topn)

        # Causality tests along meta edges -> causal structure graph.
        structure_gragh.create_structure_graph(
            maxlag=maxlag, p_threshold=p_threshold)
        structure_graphs.append(structure_gragh._structure_gragh)

    return structure_graphs

