import pandas as pd
import numpy as np
import os
import json
import pickle
import warnings
from datetime import datetime, timedelta
from tqdm import tqdm
from data_process import *
from detect import *
import logging
# Module-level logger that appends timestamped records to a fixed log file.
logger = logging.getLogger(__name__)
# log_path = '/home/dds/yjq/TraceAnomaly/trace-anomaly.log'
log_path = '/home/dds/yjq/TraceAnomaly/trace-anomaly-tt.log'
file_handler = logging.FileHandler(log_path)
# Tab-separated "timestamp  level  message" lines, second precision.
formatter = logging.Formatter("%(asctime)s\t%(levelname)s\t%(message)s",
                              datefmt="%Y-%m-%d %H:%M:%S")
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
logger.setLevel(logging.INFO)

# Suppress library warnings (e.g. pandas deprecation noise) for clean logs.
warnings.filterwarnings("ignore")

def get_trace_rca(trace_byid_dict, trace_id, call_path_index_dict):
    """Locate the suspected root-cause pod for one abnormal trace.

    Rebuilds the span tree of ``trace_id`` from its spans, then walks every
    root-to-leaf call path.  A path whose duration exceeds the historical
    mean + 3*std stored in ``call_path_index_dict`` marks its leaf pod as a
    candidate; among anomalous paths the deepest one wins.  A call path never
    seen during training immediately names its leaf pod.

    Args:
        trace_byid_dict: TraceID -> DataFrame of that trace's spans.
        trace_id: the trace to analyse.
        call_path_index_dict: call-path string ("a#b#c") -> (mean, std).

    Returns:
        The blamed pod name, "" if no path is anomalous, or None when the
        trace has no root span.
    """
    trace_demo = trace_byid_dict[trace_id]
    # Reverse the rows so spans are visited bottom-up, as in the original
    # traversal order (child spans appear after their parents in the file).
    traces = trace_demo.iloc[::-1].reset_index(drop=True)

    # Pass 1: one SpanNode per SpanID, preserving bottom-up encounter order.
    nodes = {}
    for span_id, span in traces.groupby('SpanID', sort=False):
        nodes[span_id] = SpanNode(
            span_id=span_id,
            pod_name=span['PodName'].values[0],
            operation_name=span['OperationName'].values[0],
            duration=span['Duration'].values[0],
        )

    # Precompute each span's parent once; the previous version re-grouped the
    # whole DataFrame inside the loop (O(n^2) groupby work).  The default
    # sorted groupby order is kept so children attach in the same order as
    # before, which preserves DFS path ordering.
    parent_of = {
        child_id: child['ParentID'].values[0]
        for child_id, child in trace_demo.groupby('SpanID')
    }

    # Pass 2: wire parent/child edges and find the root span.
    root_node = None
    for span_id, span in traces.groupby('SpanID', sort=False):
        if span['ParentID'].values[0] == 'root':
            root_node = nodes[span_id]
        for child_id, parent_id in parent_of.items():
            if parent_id == span_id:
                nodes[span_id].add_child(nodes[child_id])

    if root_node is None:  # was `== None`; identity comparison is correct
        return None

    # Deepest anomalous root-to-leaf path names the root-cause pod.
    longest_len = 0
    rca = ""
    for path, duration in root_node.dfs_paths_with_durations():
        call = '#'.join(node.get_span_name() for node in path)
        if call not in call_path_index_dict:
            # Call path unseen during training: blame its leaf pod outright.
            return path[-1].pod_name
        mean, std = call_path_index_dict[call][0], call_path_index_dict[call][1]
        if duration > mean + 3 * std and len(path) > longest_len:
            longest_len = len(path)
            rca = path[-1].pod_name
    return rca


def traceanomaly_rca(current_time, call_path_index_dict, abnormal_trace_list):
    """Aggregate root-cause votes over all abnormal traces of one minute.

    Loads the minute's trace CSV, runs ``get_trace_rca`` on every trace that
    was flagged abnormal by the detector, and tallies how often each pod is
    blamed.

    Args:
        current_time: datetime of the detection window (selects the CSV file).
        call_path_index_dict: call-path -> (mean, std) baseline statistics.
        abnormal_trace_list: TraceIDs flagged abnormal by the detector.

    Returns:
        List of (pod_name, vote_count) tuples sorted by count, descending.
    """
    file = f'/home/dds/yjq/data/{datetime.strftime(current_time, "%Y-%m-%d")}/trace/{datetime.strftime(current_time, "%H_%M")}_trace.csv'
    total_data = pd.read_csv(file)
    # Keep file appearance order (sort=False), as before.
    trace_byid_dict = {
        trace_id: spans
        for trace_id, spans in total_data.groupby('TraceID', sort=False)
    }
    # Set membership is O(1); the list scan was O(len(abnormal)) per trace.
    abnormal_ids = set(abnormal_trace_list)
    rca_result = {}
    for trace_id in trace_byid_dict:
        if trace_id in abnormal_ids:
            rca = get_trace_rca(trace_byid_dict, trace_id, call_path_index_dict)
            rca_result[rca] = rca_result.get(rca, 0) + 1
    return sorted(rca_result.items(), key=lambda item: -item[1])

if __name__ == "__main__":
    # Evaluation driver: for every injected fault, run TraceAnomaly detection
    # on the minute after injection, localise the root cause, and report
    # top-1/3/5 localisation accuracy.
    with open("idx.pkl", "rb") as f:
        # call path string -> (mean, std) baseline; built offline.
        call_path_index_dict=pickle.load(f)
    call_path_index = list(call_path_index_dict.keys())
    inject_jsons = [
        "/home/dds/yjq/data/2025-01-14/2025-01-14-fault_list.json",
        "/home/dds/yjq/data/2025-01-15/2025-01-15-fault_list.json"
    ]
    # Open and read the fault-injection JSON files.
    file_arr = []
    for json_file in inject_jsons:
        with open(json_file, 'r') as file:
            file_arr.append(json.load(file))

    # inject_time (seconds stripped, minute precision) -> [pod, fault type].
    # Faults sharing the same minute overwrite each other — presumably the
    # injection schedule guarantees at most one per minute; TODO confirm.
    inject_faults = {}
    for single_file in file_arr:
        for hour, faults in single_file.items():
            for fault in faults:
                inject_faults[fault["inject_time"][:-3]] = [fault["inject_pod"], fault["inject_type"]]
    original_data_path = "/home/dds/yjq/data/"
    output_path = "/home/dds/yjq/TraceAnomaly/tt/"
    top1 = 0
    top3 = 0
    top5 = 0
    # NOTE(review): this outer `i` is immediately shadowed at two places below
    # (retry counter and result loop index) and never read — dead assignment.
    i = 0
    for inject_time, [inject_pod, inject_type] in inject_faults.items():
        # Start detection one minute after injection so the fault shows up.
        rca_time = datetime.strptime(inject_time, "%Y-%m-%d %H:%M")+timedelta(minutes=1)
        logger.info("**************************************************************************")
        logger.info(f'{datetime.strptime(inject_time, "%Y-%m-%d %H:%M")}: {[inject_pod, inject_type]}')
        span_list_current = get_span_list(detect_time=rca_time)
        i = 0
        # Advance minute by minute (at most 6 attempts) until trace data exists.
        while len(span_list_current) == 0 :
            rca_time = rca_time + timedelta(minutes=1)
            span_list_current = get_span_list(detect_time=rca_time)
            i +=1 
            if i > 5:
                break
        output_rca = output_path  + datetime.strftime(rca_time, "%Y-%m-%d/%H_%M") 
        # Convert the minute's traces to STV feature files once; reuse if cached.
        if not os.path.exists(output_rca):
            process_file_to_stv(rca_time, call_path_index, output_rca)
        detect_time = rca_time
        train_file = "/home/dds/yjq/TraceAnomaly/tt/train"
        output_file = datetime.strftime(detect_time, "/home/dds/yjq/TraceAnomaly/tt/%Y-%m-%d/output/%H_%M.csv")
        detect_file = datetime.strftime(detect_time, "/home/dds/yjq/TraceAnomaly/tt/%Y-%m-%d/%H_%M")
        # NOTE(review): os.popen with a shell string; .read() blocks until the
        # detector finishes. subprocess.run with an argument list would be the
        # safer, checkable alternative (exit status is currently ignored).
        command = f"python -m traceanomaly.main --trainfile {train_file} --detectfile {detect_file} --outputfile {output_file}"
        os.popen(command).read()
        df = pd.read_csv(output_file)
        # Traces whose anomaly |score| exceeds 3 are treated as abnormal.
        abnormal_list = list(df[(abs(df['score']) > 3)]["id"])
        logger.info(f"abnormal num: {len(abnormal_list)}")
        result = traceanomaly_rca(detect_time, call_path_index_dict, abnormal_list)
    
        # NOTE(review): result[1:] drops the most-voted candidate from the log
        # while the accuracy check below still uses the full list — confirm
        # the omission is intentional.
        logger.info(result[1:])
        re_list = []
        for i in range(len(result)):
            re_list.append(result[i][0])
        # NOTE(review): slice sizes (2/4/6) are one larger than the top-1/3/5
        # names suggest, and the `" ".join(...)` substring test can credit a
        # pod whose name is a substring of another — verify both are intended.
        # For "deploy"/"load" faults a service-name prefix match also counts.
        if inject_pod in re_list[:2] or \
            ((inject_type == "deploy" or inject_type == "load") and inject_pod.split("-")[0] in re_list[:1]):
            top1 += 1
        if inject_pod in " ".join(re_list[:4]) or \
            ((inject_type == "deploy" or inject_type == "load") and inject_pod.split("-")[0] in " ".join(re_list[:4])):
            top3 += 1
        if inject_pod in " ".join(re_list[:6]) or \
            ((inject_type == "deploy" or inject_type == "load") and inject_pod.split("-")[0] in " ".join(re_list[:6])):
            top5 += 1
    logger.info(f"top1: {top1/len(inject_faults)}")
    logger.info(f"top3: {top3/len(inject_faults)}")
    logger.info(f"top5: {top5/len(inject_faults)}")