import os
import sys
from collections import deque
from typing import Dict, List, Set, Tuple

import numpy as np
import onnx
import onnx_graphsurgeon as gs

# === Project path configuration ===
# Make the project root importable regardless of the working directory.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if PROJECT_ROOT not in sys.path:
    sys.path.insert(0, PROJECT_ROOT)

# Input model and split configuration.
NMS_ONNX = os.path.join(PROJECT_ROOT, 'onnx/yolov11n_nms.onnx')
MERGE_INSERT_NODE = "/Transpose"  # name of the node at which the graph is split

# Output paths for the two exported subgraphs.
SAVE_BEFORE = os.path.join(PROJECT_ROOT, 'onnx/nms_before.onnx')
SAVE_AFTER = os.path.join(PROJECT_ROOT, 'onnx/nms_after.onnx')

def fix_graph_connectivity(graph: gs.Graph) -> None:
    """Deduplicate tensor references so each named tensor is one shared object.

    After graph surgery the same tensor name can be represented by several
    distinct objects, which makes the exporter see a disconnected graph.
    This rewrites every node's input/output lists in place so that all
    references with the same name resolve to a single canonical object
    (for outputs, the most recently produced object wins).

    Args:
        graph: the graph to repair; mutated in place.
    """
    print(f"修复图的连接性: {graph.name}")

    # Canonical tensor object per tensor name.
    tensor_map: Dict[str, object] = {}

    for node in graph.nodes:
        # Inputs: the first object seen for a name becomes canonical.
        fixed_inputs = []
        for inp in node.inputs:
            if hasattr(inp, 'name') and inp.name:
                tensor_map.setdefault(inp.name, inp)
                fixed_inputs.append(tensor_map[inp.name])
            else:
                fixed_inputs.append(inp)
        # Rebuild via clear/append so graphsurgeon's synchronized lists
        # keep producer/consumer links consistent.
        node.inputs.clear()
        for inp in fixed_inputs:
            node.inputs.append(inp)

        # Outputs: the producing node's object always wins, so just
        # overwrite the map entry and keep the node's own output object.
        # (The original if/else assigned the same value in both branches.)
        fixed_outputs = []
        for out in node.outputs:
            if hasattr(out, 'name') and out.name:
                tensor_map[out.name] = out
            fixed_outputs.append(out)
        node.outputs.clear()
        for out in fixed_outputs:
            node.outputs.append(out)

def fix_resize_nodes(graph: gs.Graph) -> None:
    """Normalize every Resize node to exactly three inputs: X, roi, scales.

    Dropping the optional `sizes` input avoids the scales/sizes conflict
    that some consumers reject. A missing or unnamed scales input is
    replaced with a default 2x spatial upsample (NCHW assumed — TODO
    confirm against the model's layout).
    """
    print("修复Resize节点...")

    for node in graph.nodes:
        if node.op != "Resize":
            continue
        print(f"处理Resize节点: {node.name}, 当前输入数: {len(node.inputs)}")

        # Input 0: the data tensor is mandatory; skip broken nodes.
        if not node.inputs or node.inputs[0] is None:
            print(f"错误: Resize节点 {node.name} 缺少数据输入")
            continue

        new_inputs = [node.inputs[0]]

        # Input 1: an empty roi placeholder constant.
        new_inputs.append(gs.Constant(
            name=f"{node.name}_roi",
            values=np.array([], dtype=np.float32),
        ))

        # Input 2: keep the existing scales tensor when present and named,
        # otherwise synthesize a default one.
        existing_scales = node.inputs[2] if len(node.inputs) > 2 else None
        if existing_scales is not None and getattr(existing_scales, 'name', "") != "":
            new_inputs.append(existing_scales)
        else:
            new_inputs.append(gs.Constant(
                name=f"{node.name}_scales",
                values=np.array([1.0, 1.0, 2.0, 2.0], dtype=np.float32),
            ))

        # Deliberately no `sizes` input: with only (X, roi, scales) there
        # is no scales/sizes ambiguity.
        node.inputs.clear()
        for tensor in new_inputs:
            node.inputs.append(tensor)

        print(f"  修复完成，只保留scales输入，输入数: {len(new_inputs)}")

def fix_unsqueeze_nodes(graph: gs.Graph) -> None:
    """Convert Unsqueeze `axes` from a tensor input (opset >= 13 form) to a
    node attribute (opset <= 12 form).

    For each two-input Unsqueeze node, resolve the axes constant (either a
    direct gs.Constant input or a named tensor that maps to a Constant in
    the graph), store it as the `axes` attribute, and drop the second
    input. Unresolvable axes fall back to [0].
    """
    print("修复Unsqueeze节点...")

    for node in graph.nodes:
        if node.op != "Unsqueeze":
            continue
        print(f"处理Unsqueeze节点: {node.name}, 当前输入数: {len(node.inputs)}")

        if len(node.inputs) != 2:
            continue
        data_input, axes_input = node.inputs[0], node.inputs[1]

        # Resolve the axes values. graph.tensors() is keyed by tensor
        # name, so a direct .get() replaces the original O(n) scan.
        axes_values = None
        if isinstance(axes_input, gs.Constant):
            axes_values = axes_input.values
        elif getattr(axes_input, 'name', None):
            candidate = graph.tensors().get(axes_input.name)
            if isinstance(candidate, gs.Constant):
                axes_values = candidate.values

        if axes_values is not None:
            # atleast_1d handles ndarray, Python scalar, and sequence
            # inputs uniformly and always yields a list. The original
            # chain returned a bare scalar for 0-d arrays (an invalid
            # `axes` attribute) and its last fallback raised TypeError
            # on non-iterable values.
            axes_list = np.atleast_1d(axes_values).tolist()
            node.attrs["axes"] = axes_list
            node.inputs.clear()
            node.inputs.append(data_input)
            print(f"  将{node.name}的axes转为属性: {axes_list}")
        else:
            # No resolvable axes constant: keep the original fallback.
            node.attrs["axes"] = [0]
            node.inputs.clear()
            node.inputs.append(data_input)

def fix_squeeze_nodes(graph: gs.Graph) -> None:
    """Convert Squeeze `axes` from a tensor input (opset >= 13 form) to a
    node attribute (opset <= 12 form).

    Unlike Unsqueeze there is no default fallback: a Squeeze whose axes
    cannot be resolved is left untouched, since axes-less Squeeze has
    different semantics (it removes all size-1 dims).
    """
    print("修复Squeeze节点...")

    for node in graph.nodes:
        if node.op != "Squeeze":
            continue
        print(f"处理Squeeze节点: {node.name}")

        if len(node.inputs) != 2:
            continue
        data_input, axes_input = node.inputs[0], node.inputs[1]

        # Resolve the axes values. graph.tensors() is keyed by tensor
        # name, so a direct .get() replaces the original O(n) scan.
        axes_values = None
        if isinstance(axes_input, gs.Constant):
            axes_values = axes_input.values
        elif getattr(axes_input, 'name', None):
            candidate = graph.tensors().get(axes_input.name)
            if isinstance(candidate, gs.Constant):
                axes_values = candidate.values

        if axes_values is not None:
            # atleast_1d handles ndarray, Python scalar, and sequence
            # inputs uniformly and always yields a list. The original
            # chain returned a bare scalar for 0-d arrays (an invalid
            # `axes` attribute) and its last fallback raised TypeError
            # on non-iterable values.
            axes_list = np.atleast_1d(axes_values).tolist()
            node.attrs["axes"] = axes_list
            node.inputs.clear()
            node.inputs.append(data_input)
            print(f"  将{node.name}的axes转为属性: {axes_list}")

def fix_split_node_inputs(nodes: List[gs.Node], graph: gs.Graph) -> None:
    """Convert Split `split` sizes from a tensor input (opset >= 13 form)
    into a node attribute (opset <= 12 form) for every Split node in `nodes`.

    Args:
        nodes: nodes to scan (a subset of graph.nodes).
        graph: owning graph, used to resolve named split-size constants.
    """
    for node in nodes:
        if node.op != "Split" or len(node.inputs) != 2:
            continue
        try:
            data_input, split_param = node.inputs[0], node.inputs[1]

            # Resolve the split sizes: direct Constant, or a named
            # tensor that maps to a Constant in the graph.
            const = None
            if isinstance(split_param, gs.Constant):
                const = split_param
            else:
                candidate = graph.tensors().get(split_param.name)
                if isinstance(candidate, gs.Constant):
                    const = candidate

            if const is not None:
                # atleast_1d yields a flat list for ndarray/scalar/sequence
                # alike; the original fallback `list(...)` raised on
                # non-iterables and `tolist()` returned a bare scalar for
                # 0-d arrays (an invalid `split` attribute).
                split_values = np.atleast_1d(const.values).tolist()

                node.attrs["split"] = split_values
                node.inputs.clear()
                node.inputs.append(data_input)
                print(f"修复Split节点 {node.name}: split={split_values}")
        except Exception as e:
            # Best-effort: report and continue with the remaining nodes.
            print(f"[Error] 修复Split节点 {node.name} 时出错: {e}")

def bfs_downstream(start_nodes: List[gs.Node], all_nodes: List[gs.Node]) -> List[gs.Node]:
    """Breadth-first search for all nodes downstream of `start_nodes`.

    A node is downstream if it consumes (directly or transitively) an
    output of a start node; the start nodes themselves are included.

    Args:
        start_nodes: BFS seeds.
        all_nodes: candidate pool; results preserve this list's order.

    Returns:
        The nodes of `all_nodes` that were visited, in `all_nodes` order.
    """
    visited: Set[int] = set()
    # deque gives O(1) popleft; list.pop(0) was O(n) per dequeue.
    queue = deque(start_nodes)

    while queue:
        node = queue.popleft()
        nid = id(node)
        if nid in visited:
            continue
        visited.add(nid)

        # Enqueue every consumer of this node's outputs. The linear scan
        # is kept so tensor membership uses the same equality semantics
        # as the original.
        for out_var in node.outputs:
            for candidate in all_nodes:
                if out_var in candidate.inputs and id(candidate) not in visited:
                    queue.append(candidate)

    return [n for n in all_nodes if id(n) in visited]

def create_clean_variable(original_var: gs.Variable, name_suffix: str = "") -> gs.Variable:
    """Return a fresh Variable mirroring `original_var`.

    The copy carries over name (plus optional suffix), dtype, and shape
    but shares no object state with the original; missing attributes fall
    back to an id-based name, float32 dtype, and an unknown shape.
    """
    if hasattr(original_var, 'name'):
        new_name = original_var.name + name_suffix
    else:
        new_name = f"var_{id(original_var)}"
    dtype = getattr(original_var, 'dtype', np.float32)
    shape = getattr(original_var, 'shape', None)
    return gs.Variable(name=new_name, dtype=dtype, shape=shape)

def build_clean_subgraph(nodes: List[gs.Node], graph_inputs: List[gs.Variable], 
                        graph_outputs: List[gs.Variable], name: str) -> gs.Graph:
    """Rebuild `nodes` as a standalone graph with freshly created variables.

    Every named non-constant tensor is replaced by a clean copy so the
    new graph shares no Variable objects with the source graph; Constants
    are reused as-is.

    Args:
        nodes: nodes to copy into the new graph.
        graph_inputs: tensors that become the new graph's inputs.
        graph_outputs: tensors that become the new graph's outputs.
        name: name of the resulting graph.

    Returns:
        A new gs.Graph containing copies of `nodes`.
    """
    print(f"构建干净的子图: {name}")

    # Maps original tensor name -> its clean replacement.
    var_mapping: Dict[str, gs.Variable] = {}

    def remap(tensor):
        """Return the clean stand-in for a named tensor, creating it on demand."""
        if tensor.name not in var_mapping:
            var_mapping[tensor.name] = create_clean_variable(tensor)
        return var_mapping[tensor.name]

    # Graph inputs always get a fresh variable (registered in the map).
    clean_inputs = []
    for inp in graph_inputs:
        if getattr(inp, 'name', None):
            fresh = create_clean_variable(inp)
            var_mapping[inp.name] = fresh
            clean_inputs.append(fresh)

    # Graph outputs reuse an existing mapping when one was already made.
    clean_outputs = [remap(out) for out in graph_outputs if getattr(out, 'name', None)]

    # Copy each node, rewriting tensor references through the mapping.
    clean_nodes = []
    for node in nodes:
        replica = gs.Node(
            op=node.op,
            name=node.name,
            attrs=node.attrs.copy() if hasattr(node, 'attrs') else {},
        )

        # Constants pass through untouched; named variables are remapped;
        # anything else (unnamed) is kept as-is.
        replica.inputs = [
            inp if isinstance(inp, gs.Constant)
            else remap(inp) if getattr(inp, 'name', None)
            else inp
            for inp in node.inputs
        ]
        replica.outputs = [
            remap(out) if getattr(out, 'name', None) else out
            for out in node.outputs
        ]
        clean_nodes.append(replica)

    return gs.Graph(
        nodes=clean_nodes,
        inputs=clean_inputs,
        outputs=clean_outputs,
        name=name,
    )

def validate_onnx_model(model_path: str, model_name: str) -> bool:
    """Run the ONNX checker on a saved model file.

    Args:
        model_path: path of the .onnx file to check.
        model_name: label used in the printed result.

    Returns:
        True if the model loads and passes the checker, False otherwise.
    """
    try:
        # Keep only the fallible calls inside the try block.
        onnx.checker.check_model(onnx.load(model_path))
    except Exception as e:
        print(f"[Error] {model_name} 模型验证失败: {e}")
        return False
    print(f"[OK] {model_name} 模型验证通过")
    return True

def main():
    """Split the NMS ONNX model in two at MERGE_INSERT_NODE.

    Loads NMS_ONNX, partitions its nodes into a "before" half and an
    "after" half (the split node and everything downstream of it),
    repairs op-version quirks in both halves, exports them to
    SAVE_BEFORE / SAVE_AFTER, and validates both files.

    Returns:
        True on success; False on any failure (missing input, load,
        export, or validation error).
    """
    print(f"载入NMS模型: {NMS_ONNX}")

    if not os.path.exists(NMS_ONNX):
        print(f"[Error] 输入文件不存在: {NMS_ONNX}")
        return False

    try:
        nms_graph = gs.import_onnx(onnx.load(NMS_ONNX))
        print(f"成功载入原始模型，包含 {len(nms_graph.nodes)} 个节点")
    except Exception as e:
        print(f"[Error] 载入模型失败: {e}")
        return False

    # 1. Locate the split node; it must be unique by name.
    transpose_candidates = [n for n in nms_graph.nodes if n.name == MERGE_INSERT_NODE]
    if len(transpose_candidates) != 1:
        print(f"[Error] 未找到唯一的分割节点 {MERGE_INSERT_NODE}")
        return False

    transpose_node = transpose_candidates[0]
    print(f"找到分割节点: {transpose_node.name}")

    # 2. Partition nodes: everything reachable downstream of the split
    #    node (inclusive) is "after"; the rest is "before".
    transpose_and_after = bfs_downstream([transpose_node], nms_graph.nodes)
    before_nodes = [n for n in nms_graph.nodes if n not in transpose_and_after]

    print(f"Before部分节点数: {len(before_nodes)}")
    print(f"After部分节点数: {len(transpose_and_after)}")

    # 3. Convert Split nodes' `split` tensor input into an attribute in
    #    both halves.
    fix_split_node_inputs(before_nodes, nms_graph)
    fix_split_node_inputs(transpose_and_after, nms_graph)

    # 4. Find the boundary tensors: outputs of "before" consumed by "after".
    after_inputs_name_set = set()
    for n in transpose_and_after:
        for var in n.inputs:
            if hasattr(var, 'name') and var.name:
                after_inputs_name_set.add(var.name)

    # Outputs of "before" that cross the boundary (deduplicated by name).
    before_outputs = []
    seen_names = set()
    for n in before_nodes:
        for v in n.outputs:
            if (hasattr(v, 'name') and v.name and 
                v.name in after_inputs_name_set and 
                v.name not in seen_names):
                before_outputs.append(v)
                seen_names.add(v.name)

    # Original graph inputs that feed the "before" half directly.
    before_inputs = []
    for inp in nms_graph.inputs:
        for n in before_nodes:
            if inp in n.inputs:
                before_inputs.append(inp)
                break

    # Collect the "after" half's inputs, deduplicated by name.
    after_inputs = []
    seen_input_names = set()

    # First the boundary tensors coming from the "before" half.
    for var in before_outputs:
        if hasattr(var, 'name') and var.name and var.name not in seen_input_names:
            after_inputs.append(var)
            seen_input_names.add(var.name)

    for node in transpose_and_after:
        for inp in node.inputs:
            # Constants are embedded in the subgraph, not graph inputs.
            if isinstance(inp, gs.Constant):
                continue
            if (hasattr(inp, 'name') and inp.name and 
            inp.name not in seen_input_names):
                # Skip tensors produced inside the "after" half itself.
                is_internal = False
                for n in transpose_and_after:
                    for out in n.outputs:
                        if hasattr(out, 'name') and out.name == inp.name:
                            is_internal = True
                            break
                    if is_internal:
                        break

                if not is_internal:
                    after_inputs.append(inp)
                    seen_input_names.add(inp.name)

    # 5. Rebuild each half as a standalone graph with fresh variables.
    before_graph = build_clean_subgraph(before_nodes, before_inputs, before_outputs, "nms_before")
    after_graph = build_clean_subgraph(transpose_and_after, after_inputs, nms_graph.outputs, "nms_after")

    # 6. Normalize op-specific quirks (Resize/Unsqueeze/Squeeze inputs).
    fix_resize_nodes(before_graph)
    fix_resize_nodes(after_graph)
    fix_unsqueeze_nodes(before_graph)
    fix_unsqueeze_nodes(after_graph)
    fix_squeeze_nodes(before_graph)
    fix_squeeze_nodes(after_graph)

    # 7. Deduplicate tensor references so each half exports cleanly.
    fix_graph_connectivity(before_graph)
    fix_graph_connectivity(after_graph)

    # 8. Export both halves.
    print(f"\n导出Before模型到: {SAVE_BEFORE}")
    os.makedirs(os.path.dirname(SAVE_BEFORE), exist_ok=True)

    try:
        onnx.save(gs.export_onnx(before_graph), SAVE_BEFORE)
        print("Before模型导出成功")
    except Exception as e:
        print(f"Before模型导出失败: {e}")
        return False

    print(f"导出After模型到: {SAVE_AFTER}")
    os.makedirs(os.path.dirname(SAVE_AFTER), exist_ok=True)

    try:
        onnx.save(gs.export_onnx(after_graph), SAVE_AFTER)
        print("After模型导出成功")
    except Exception as e:
        print(f"After模型导出失败: {e}")
        return False

    # 9. Run the ONNX checker on both exported files.
    before_valid = validate_onnx_model(SAVE_BEFORE, "Before")
    after_valid = validate_onnx_model(SAVE_AFTER, "After")

    if before_valid and after_valid:
        print("\n[SUCCESS] 模型拆分完成，验证通过！")
        print(f"Before模型: {SAVE_BEFORE}")
        print(f"After模型: {SAVE_AFTER}")
        return True
    else:
        print("\n[WARNING] 模型拆分完成，但存在验证问题")
        return False

if __name__ == "__main__":
    # Script entry point: split the model, then report the follow-up step.
    if main():
        print("\n下一步可以使用PPQ对nms_before.onnx进行量化")
    else:
        print("\n请检查并修复上述问题")