"""
Trace file loading and parsing utilities.
"""

import copy
import json
import os
import pickle
import random
import string
from pathlib import Path

import config
from span import Span


def GenerateRandomID(length=16, suffix=''):
    """Return a random alphanumeric identifier of *length* chars plus *suffix*.

    NOTE(review): uses ``random``, not ``secrets`` — fine for synthetic span
    ids, not for anything security-sensitive.
    """
    alphabet = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(random.choices(alphabet, k=length)) + suffix


class TraceLoader:
    """负责加载和解析 trace 文件的类"""

    def __init__(self, fix_mode=7):
        """Create an empty loader.

        Args:
            fix_mode: selects the dataset-specific root span / fixup strategy
                (see get_first_span_name for the mapping).
        """
        self.fix_mode = fix_mode

        # Aggregated results across every trace loaded so far.
        self.all_spans = {}
        # Keyed by trace_id -> {process_id: service_name}.
        self.all_processes = {}
        # Keyed by process/service name.
        self.in_spans_by_process = {}
        self.out_spans_by_process = {}

        # Bookkeeping for rewriting self-loop calls (caller == callee).
        self.selfLoopMap = {}
        self.serviceLoopMap = {}

        # Synthetic processes created while fixing traces.
        self.new_process_count = 0
        self.new_process_reverse_map = {}
        self.new_processes = {}

    def get_first_span_name(self):
        """Map self.fix_mode to the operation name of the expected root span.

        Returns:
            The root operation name, or None for an unknown mode.
        """
        roots = {
            0: "init-span",                                               # nodejs
            1: "ComposeReview",                                           # media
            2: "HTTP GET /hotels",                                        # hotel
            3: "HTTP GET /recommendations",                               # hotel
            4: "[Todo] CompleteTodoCommandHandler",                       # hotel
            5: "productpage.default.svc.cluster.local:9080/productpage",  # bookinfo
            # "POST" comes from the load-generator/frontend-web process; with it
            # as the root, the frontend-proxy process still has in_spans.
            6: "POST",                                                    # otel-demo
            # "ingress" comes from the frontend-proxy process and every dataset
            # starts from it; with it as the root there are no frontend-proxy
            # in_spans.
            7: "ingress",                                                 # otel-demo
        }
        return roots.get(self.fix_mode)

    def parse_spans_json(self, spans_json, first_span):
        """
        Parse raw span dicts into Span objects and handle self-loops.

        Args:
            spans_json: list of span dicts (Jaeger-style JSON)
            first_span: operation name of the expected root span, or None to
                infer structure from the spans' caller/callee fields

        Returns:
            tuple: (spans, selfLoopMap, serviceLoopMap, spans_json); spans is
            None when a parent/child time-order constraint is violated.
        """
        spans = {}
        overall_trace_id = None

        # Step 1: Create Span objects without linking
        for span in spans_json:
            span_kind = None
            span_scope = None
            for tag in span["tags"]:
                if tag["key"] == "span.kind":
                    span_kind = tag["value"]
                elif tag["key"] == "otel.scope.name":
                    span_scope = tag["value"]

            process_id = span["processID"]
            trace_id = span["traceID"]
            sid = span["spanID"]
            span_id = (trace_id, sid)
            start_mus = span["startTime"]
            duration_mus = span["duration"]
            # The legacy format carries the operation name in "requestType".
            if "requestType" in span.keys():
                op_name = span.get("requestType", None)
            else:
                op_name = span.get("operationName", None)

            # All spans of one call must share a single trace id.
            if overall_trace_id is None:
                overall_trace_id = trace_id
            else:
                if trace_id != overall_trace_id:
                    print("Different trace ids for spans in the same trace!")
                    assert False

            references = []
            for ref in span["references"]:
                references.append((ref["traceID"], ref["spanID"]))

            if first_span is None:
                # Give client spans a distinct ".client" id and point server
                # spans at that synthetic client id, so both RPC halves exist.
                if span_kind == "client":
                    sid = sid + ".client"
                    span_id = (trace_id, sid)

                if span_kind == "server":
                    if len(references) == 1:
                        references[0] = (references[0][0], sid + '.client')

            # No first_span root operation was specified; detect self-loops
            # from the call relationship instead.
            if first_span is None:
                # NOTE(review): "caller"/"callee" keys are assumed present
                # only in this branch — confirm first_span is None implies
                # the legacy format that carries them.
                if span["caller"] == span["callee"]:
                    sanitized_sid = sid
                    if sanitized_sid.endswith('.client'):
                        sanitized_sid = sanitized_sid[:-7]
                    original_callee = span["callee"]
                    if sanitized_sid not in self.selfLoopMap:
                        # Give the looping callee a fresh synthetic "-loop"
                        # identity; remember the mapping in both directions.
                        new_callee = GenerateRandomID(suffix="-loop")
                        self.selfLoopMap[sanitized_sid] = [original_callee, new_callee]
                        self.serviceLoopMap[new_callee] = original_callee
                    span["callee"] = self.selfLoopMap[sanitized_sid][1]
                    if span_kind == "server":
                        process_id = self.selfLoopMap[sanitized_sid][1]
                        span["processID"] = process_id

            my_span = Span(
                trace_id,
                sid,
                start_mus,
                duration_mus,
                op_name,
                references,
                process_id,
                span_kind,
                span_scope,
                span["tags"]
            )
            my_span.ExtractContent()
            spans[span_id] = my_span

        # The fixups below are only needed when no first_span root was given.
        if first_span is None:
            # Step 2: Create a temporary structure to hold child references
            temp_children = {}

            for span_id, span in spans.items():
                if not span.IsRoot():
                    parent_id = span.references[0]
                    if parent_id not in temp_children:
                        temp_children[parent_id] = []
                    temp_children[parent_id].append(span_id)

            # Step 3: Link spans using the temporary structure
            for parent_id, children in temp_children.items():
                if parent_id in spans:
                    for child_id in children:
                        spans[parent_id].AddChild(child_id)

            # Check time order constraint
            def check_time_constraints(span):
                # Every child's interval must lie within its parent's interval.
                for child_id in span.children_spans:
                    child = spans[child_id]
                    if not (
                            span.start_mus <= child.start_mus and
                            (span.start_mus + span.duration_mus) >= (child.start_mus + child.duration_mus)
                    ):
                        print(f"Time constraint violated between span {span.sid} and its child {child.sid}")
                        return False
                    if not check_time_constraints(child):
                        return False
                return True

            root_span = next((span for span in spans.values() if span.IsRoot()), None)
            if root_span and not check_time_constraints(root_span):
                # Tell the caller to drop this trace (spans is None).
                return None, self.selfLoopMap, self.serviceLoopMap, spans_json

            # Step 4: Update references for descendants of self-loop spans
            def update_references(span):
                for child_id in span.children_spans:
                    child = spans[child_id]
                    if child.kind == "client":
                        # Client children inherit the (possibly rewritten)
                        # process id of their parent span.
                        child.process_id = spans[(span.trace_id, span.sid)].process_id
                    # Recursively update references for all descendants
                    update_references(child)

            def traverse_and_update(span):
                sanitized_sid = span.sid
                if sanitized_sid.endswith('.client'):
                    sanitized_sid = sanitized_sid[:-7]
                if sanitized_sid in self.selfLoopMap:
                    update_references(span)
                for child_id in span.children_spans:
                    child = spans[child_id]
                    traverse_and_update(child)

            if root_span:
                traverse_and_update(root_span)

            # Child links were only needed for the fixups above; reset them so
            # process_trace_data can rebuild them later.
            for span_id, span in spans.items():
                span.children_spans = []

        return spans, self.selfLoopMap, self.serviceLoopMap, spans_json

    def parse_processes_json(self, processes_json):
        """Reduce the Jaeger 'processes' table to {process_id: service_name}."""
        return {pid: meta["serviceName"] for pid, meta in processes_json.items()}

    def parse_processes_json_legacy(self, spans_json):
        """Derive the process table from the spans themselves (legacy format).

        In this format the process id doubles as the service name.
        """
        return {span["processID"]: span["processID"] for span in spans_json}

    def fix_spans_7(self, spans, processes):
        """Fix spans for the "ingress"-rooted (otel-demo) dataset.

        Intentionally a no-op: these traces apparently need no structural
        repair, and the hook keeps the call site in parse_json_trace uniform
        with fix_spans_0 / fix_spans_2.
        """
        pass

    # unused
    def fix_spans_0(self, spans, processes):
        """Fix spans for the nodejs "init-span" dataset (mode 0).

        Turns every client span into a server span and synthesizes a matching
        "_client" span on the calling service's process, using the hard-coded
        service topology in process_map_1. Mutates *spans* and returns it.
        """
        # Child service -> parent (calling) service for this fixed topology.
        process_map_1 = {
            "service5": "service3",
            "service4": "service2",
            "service2": "service1",
            "service3": "service1",
            "service1": "init-service"
        }
        # Service name -> process id, filled in from the observed spans.
        process_map_2 = {}

        def get_process_of_span(span_id):
            # Resolve a span id to its service name via the process table.
            pid = spans[span_id].process_id
            return processes[pid]

        for span_id, span in spans.items():
            process = get_process_of_span(span_id)
            process_map_2[process] = span.process_id

        new_spans = {}
        for span_id, span in spans.items():
            process = get_process_of_span(span_id)
            if span.kind == "client":
                span.kind = "server"
            elif span.kind == "server":
                # Clone this server span into a synthetic client span that
                # lives on the calling service's process and parents it.
                span_copy = copy.deepcopy(span)
                copy_ref = copy.deepcopy(span.references)
                span.references[0] = (copy_ref[0][0], span.sid + "_client")
                client_process = process_map_1[process]
                span_copy.sid = span_copy.sid + "_client"
                span_copy.process_id = process_map_2[client_process]
                span_copy.kind = "client"
                span_copy.references = copy_ref
                new_spans[(span_copy.trace_id, span_copy.sid)] = span_copy

        spans.update(new_spans)
        return spans

    # unused
    def fix_spans_2(self, spans, processes):
        """Fix spans for the "ComposeReview"-rooted (media) dataset.

        Re-roots the trace at the ComposeReview span (dropping its ancestors),
        removes spans whose parent runs in the same process, and synthesizes
        "_client" spans for the remaining cross-process edges.

        Returns:
            tuple: (spans, processes) — new span dict and the (possibly
            extended) process table.
        """

        def find_parent_process(id):
            # Process id of the span with this id.
            return spans[id].process_id

        def find_grandparent_process(id):
            # Process id of the parent's parent, or None when not found.
            for span_id, span in spans.items():
                if span_id == id and len(span.references) != 0:
                    return find_parent_process(span.references[0])
            return None

        def delete_ancestors(id):
            # Remove this span and, recursively, all of its ancestors
            # from new_spans.
            if len(spans[id].references) != 0:
                delete_ancestors(spans[id].references[0])
            del new_spans[id]

        def change_child_references(id):
            # Re-point children of `id` at the new root (sid == trace_id).
            for span_id, span in spans.items():
                if len(span.references) != 0:
                    if span.references[0] == id:
                        new_ref = (span.trace_id, span.trace_id)
                        new_spans[span_id].references[0] = new_ref

        new_spans = copy.deepcopy(spans)
        for span_id, span in spans.items():
            if span.op_name == "ComposeReview":
                # Make ComposeReview the root: drop its ancestors and reuse the
                # trace id as its span id.
                delete_ancestors(span.references[0])
                change_child_references(span_id)
                span.sid = span.trace_id
                span.references = []
                new_spans[(span.trace_id, span.sid)] = span
                del new_spans[span_id]

        spans = copy.deepcopy(new_spans)
        for span_id, span in spans.items():
            if len(span.references) != 0:
                parent_process = find_parent_process(span.references[0])
                if parent_process is not None:
                    if parent_process == span.process_id:
                        # Intra-process edge: keep only the parent span.
                        del new_spans[span_id]

        spans = copy.deepcopy(new_spans)
        new_spans2 = {}

        for span_id, span in spans.items():
            span.kind = "server"
            if len(span.references) != 0:
                # Synthesize the matching client span on the parent's process.
                span_copy = copy.deepcopy(span)
                copy_ref = copy.deepcopy(span.references)
                span.references[0] = (copy_ref[0][0], span.sid + "_client")
                span_copy.sid = span_copy.sid + "_client"
                span_copy.process_id = find_parent_process(copy_ref[0])
                span_copy.kind = "client"
                span_copy.references = copy_ref
                new_spans2[(span_copy.trace_id, span_copy.sid)] = span_copy

        spans.update(new_spans2)

        new_process_map = {}
        # Order spans by start time for the bookkeeping below.
        spans = {k: v for k, v in sorted(spans.items(), key=lambda item: item[1].start_mus)}

        multiple_map = {}

        def update_map():
            # Rebuild service name -> [incoming caller services] over server
            # spans, consulting new_processes for synthetic process ids.
            nonlocal multiple_map
            multiple_map = {}
            for span_id, span in spans.items():
                if span.kind == "server":
                    if span.process_id in processes:
                        process_name = processes[span.process_id]
                    else:
                        process_name = self.new_processes[span.process_id]

                    if process_name not in multiple_map:
                        multiple_map[process_name] = []
                    if len(span.references) != 0:
                        pid = find_parent_process(span.references[0])
                        if pid is not None:
                            if pid in processes:
                                incoming = processes[pid]
                            else:
                                incoming = self.new_processes[pid]
                            multiple_map[process_name].append(incoming)

        update_map()

        processes.update(self.new_processes)
        self.all_spans.update(spans)

        return spans, processes

    def parse_json_trace(self, json_data):
        """
        Load a single trace.

        Args:
            json_data: one decoded trace dict, carrying "traceID", "spans"
                and (for the non-legacy format) "processes"

        Returns:
            tuple: (trace_id, spans, processes, selfLoopMap, serviceLoopMap);
            the first three are None when parse_spans_json rejects the trace.
        """
        first_span = self.get_first_span_name()
        # NOTE(review): assert is stripped under -O; an unknown fix_mode would
        # then silently proceed with first_span=None.
        assert first_span
        ret = []
        processes = None

        d = json_data  # the trace dict
        trace_id = d["traceID"]
        spans, self.selfLoopMap, self.serviceLoopMap, d["spans"] = self.parse_spans_json(
            d["spans"], first_span
        )
        if spans is None:
            # Time-order constraint violated inside parse_spans_json.
            return None, None, None, self.selfLoopMap, self.serviceLoopMap

        # Legacy format carries "requestType" and no separate process table.
        if "requestType" in d["spans"][0].keys():
            processes = self.parse_processes_json_legacy(d["spans"])
        else:
            processes = self.parse_processes_json(d["processes"])

        if first_span == "init-span":
            spans = self.fix_spans_0(spans, processes)  # returns a new spans dict
        elif first_span == "ingress":
            self.fix_spans_7(spans, processes)  # in place (currently a no-op)
        elif first_span == "ComposeReview":
            spans, processes = self.fix_spans_2(spans, processes)  # returns new dicts

        # Accept the trace only if a root-like span exists (matching op name
        # or no references at all).
        root_service = None
        for span_id, span in spans.items():
            if span.op_name == first_span or len(span.references) == 0:
                root_service = span.op_name
        if root_service is not None:
            ret.append((trace_id, spans))

        # Exactly one trace is expected; a rootless trace trips this assert.
        assert len(ret) == 1
        trace_id, spans = ret[0]

        return trace_id, spans, processes, self.selfLoopMap, self.serviceLoopMap

    def process_trace_data(self, trace_id, spans, processes):
        """
        处理 trace 数据，将 spans 按 process 分类
        
        Args:
            trace_id: trace ID
            spans: spans 字典
            processes: processes 字典
            
        Returns:
            int: 处理成功返回 1，否则返回 0
        """
        first_span = self.get_first_span_name()
        assert first_span

        def add_span_to_process(span_id):
            span = spans[span_id]
            process = processes[span.process_id]
            spans[span_id].process = process  # 惰性更新 span.process 属性
            if span.IsInSpan(process):  # 下推判断
                if process not in self.in_spans_by_process:
                    self.in_spans_by_process[process] = []
                self.in_spans_by_process[process].append(span)
            elif span.IsOutSpan(process):  # 下推判断
                if process not in self.out_spans_by_process:
                    self.out_spans_by_process[process] = []
                self.out_spans_by_process[process].append(span)

        root_span_id = None
        # populate children
        for span_id, span in spans.items():
            if span.op_name == first_span or len(span.references) == 0:
                root_span_id = span_id
            for par_id in span.references:
                if par_id in spans:
                    spans[par_id].AddChild(span.GetId())

        for span_id, span in spans.items():
            span.children_spans.sort(
                key=lambda child_span_id: spans[child_span_id].start_mus
            )

        def explore_subtree(span_id, depth):
            span = spans[span_id]
            add_span_to_process(span_id)
            for child in span.children_spans:
                explore_subtree(child, depth + 1)

        if spans[root_span_id].op_name == first_span or first_span is None:
            explore_subtree(root_span_id, 0)
            # 更新 spans 数据集
            self.all_spans.update(spans)
            # 更新进程映射表
            self.all_processes[trace_id] = processes
            return 1
        return 0

    def calculate_trace_start_time(self, files):
        """Return one start time per file.

        The start time is the root span's startTime (the first span without
        references); files with no usable trace or root yield +inf so they
        sort last.
        """
        NO_ROOT = float('inf')
        start_times = []
        for idx, path in enumerate(files):
            if config.VERBOSE:
                print("Calculating start time for ", idx)
            with open(path, 'r') as fh:
                content = json.load(fh)
            traces = content.get("data", [])
            if not traces:
                start_times.append(NO_ROOT)
                continue
            span_list = traces[0].get("spans", [])
            if not span_list:
                start_times.append(NO_ROOT)
                continue
            # First span without references is treated as the root.
            root = None
            for candidate in span_list:
                if not candidate.get("references", []):
                    root = candidate
                    break
            if not root:
                start_times.append(NO_ROOT)
                continue
            start_times.append(float(root["startTime"]))
        return start_times

    def time_order(self, files):
        """Return *files* sorted by their trace start time (ascending)."""
        starts = self.calculate_trace_start_time(files)
        order = sorted(range(len(files)), key=starts.__getitem__)
        return [files[i] for i in order]

    def get_all_traces_in_dir(self, directory, clear_cache=False):
        """
        Return all trace files in *directory*, sorted by start time.

        The sorted list is cached in a pickle next to the traces.

        Args:
            directory: directory holding the trace files
            clear_cache: when True, discard the cache and rebuild it

        Returns:
            list: sorted absolute file paths
        """
        cache_path = Path(directory) / "time_order_filenames.pickle"

        if clear_cache and os.path.exists(cache_path):
            os.remove(cache_path)

        if os.path.exists(cache_path):
            # NOTE(review): pickle here is only safe because the cache is
            # produced locally by this same code.
            with open(cache_path, "rb") as fh:
                return pickle.load(fh)

        base = os.path.abspath(directory)
        candidates = [
            os.path.join(base, name)
            for name in os.listdir(directory)
            if os.path.isfile(os.path.join(directory, name)) and name.endswith("json")
        ]
        ordered = self.time_order(candidates)

        with open(cache_path, "wb") as fh:
            pickle.dump(ordered, fh)

        return ordered

    def load_traces(self, directory, compressed=False, clear_cache=False, max_traces=None):
        """
        Load every trace file in a directory.

        Args:
            directory: directory holding the trace JSON files
            compressed: when True the directory is deleted after loading
            clear_cache: when True the time-order cache is rebuilt
            max_traces: stop after this many successfully processed traces
                (None means no limit)

        Returns:
            tuple: (in_spans_by_process, out_spans_by_process, all_spans, all_processes)
        """
        directory = directory.rstrip('\\')

        trace_files = self.get_all_traces_in_dir(directory, clear_cache)

        # A subset of the samples is enough for a smoke test.
        if config.DEBUG:
            trace_files = trace_files[:int(len(trace_files) * 0.2)]

        cnt = 0
        for i, trace_file in enumerate(trace_files):
            if config.VERBOSE:
                print("Loading traces:", i)

            # Bug fix: the file *path* used to be handed to parse_json_trace,
            # which expects the decoded trace dict. Mirror
            # calculate_trace_start_time: each file holds {"data": [trace, ...]}
            # and only the first trace is used.
            with open(trace_file, 'r') as f:
                json_data = json.load(f)
            data = json_data.get("data", [])
            if not data:
                continue

            trace_id, spans, processes, _, _ = self.parse_json_trace(data[0])

            if trace_id is None:
                continue

            cnt += self.process_trace_data(trace_id, spans, processes)

            if max_traces is not None and cnt >= max_traces:
                break

        if compressed:
            import shutil
            shutil.rmtree(directory + "/")

        return (
            self.in_spans_by_process,
            self.out_spans_by_process,
            self.all_spans,
            self.all_processes
        )
