import onnx.numpy_helper as numpy_helper
import onnx
import copy
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import onnxruntime

# --- model selection -------------------------------------------------------
# Earlier experiments loaded these files directly; kept for reference only
# (they were dead assignments, immediately shadowed below):
#   /home/e00974/graph_lla/golden_gen/conformer/conformer_encoder_inf_3m2_scr_em2.onnx
#   encoder_shapeinfer_fused_static_inputmask.onnx

from onnx_files import const_onnx, inputmask_onnx

# The graph to rewrite comes from the project-local path registry.
model_file = inputmask_onnx

model = onnx.load(model_file)
graph = model.graph
rmv_list = []  # nodes scheduled for deletion by the rewrite passes below

def swap_input(node):
    """Exchange the first two inputs of *node* in place."""
    node.input[0], node.input[1] = node.input[1], node.input[0]

def add_new_reshape_node(i, add_node, model):
    """Append a Reshape([4, 64, -1]) after *add_node* and return it.

    Two nodes are added to ``model.graph``: a Constant holding the shape
    tensor ``[4, 64, -1]`` and a Reshape that consumes ``add_node``'s
    output together with that shape.  ``i`` is only used to build unique
    node/tensor names.
    """
    shape_name = f'R_new_{i}_2'
    shape = np.array([4, 64, -1]).astype(np.int64)
    shape_tensor = onnx.helper.make_tensor(
        name=shape_name,
        data_type=onnx.TensorProto.INT64,
        dims=shape.shape,
        vals=shape,
    )
    shape_const = onnx.helper.make_node(
        name=f'{shape_name}const',
        op_type='Constant',
        inputs=[],
        outputs=[shape_name],
        value=shape_tensor,
    )
    model.graph.node.append(shape_const)

    reshape_node = onnx.helper.make_node(
        name=f'Reshape_new_{i}_2',
        op_type='Reshape',
        inputs=[add_node.output[0], shape_name],
        outputs=[f'Reshape_new_{i}_2_out'],
    )
    model.graph.node.append(reshape_node)
    return reshape_node

if __name__ == '__main__':
    # Graph surgery: replace the self_attn pos_bias_u / pos_bias_v
    # initializers with Constant nodes, re-route the surrounding Add /
    # Reshape / Transpose nodes, and force the attention Reshapes to the
    # fixed shape [4, 64, -1].
    # NOTE(review): most of the rewiring below indexes graph.node at fixed
    # offsets (i + 1, i + 5, i - 10, i + 15, ...) and therefore depends on
    # the exact node order produced by this particular export — verify
    # against the model before reusing on a different export.
    biaslist_u = []  # initializer names containing "self_attn.pos_bias_u"
    biaslist_v = []  # initializer names containing "self_attn.pos_bias_v"
    add_new = []     # Add nodes whose second input is a pos_bias tensor
    for t in model.graph.initializer:
        if "self_attn.pos_bias_u" in t.name or "self_attn.pos_bias_v" in t.name:
            if "self_attn.pos_bias_u" in t.name:
                biaslist_u.append(t.name)
            if "self_attn.pos_bias_v" in t.name:
                biaslist_v.append(t.name)
            # presumably the third dot-separated field is a per-layer index
            # used to build unique node names — TODO confirm naming scheme
            num = t.name.split(".")[2]
            value = numpy_helper.to_array(t).astype(np.float32)
            # flatten the bias to 1-D; 256 is the hard-coded attention dim
            new_add_value = value.reshape((256))
            value = onnx.helper.make_tensor(name=f"{t.name}.{num}", data_type=onnx.TensorProto.FLOAT, dims=new_add_value.shape, vals=new_add_value)
            # The Constant node writes to the initializer's original tensor
            # name, so downstream consumers keep resolving the same name.
            const_i = onnx.helper.make_node(name=f"{t.name}.{num}", op_type='Constant', inputs=[], outputs=[t.name], value=value)
            model.graph.node.append(const_i)

            for i, node0 in enumerate(graph.node):
                if node0.op_type == "Add" and "self_attn.pos_bias" in node0.input[1]:
                    add_new.append(node0)
                    # schedule the node right after this Add for removal
                    rmv_list.append(graph.node[i + 1])
                    # NOTE(review): the assignment below is a no-op — it
                    # writes back exactly the value the condition matched.
                    if const_i.output[0] == node0.input[1]:
                        node0.input[1] = const_i.output[0] # initialize to constant

                    for j, node1 in enumerate(graph.node):
                        if node1.op_type == "Reshape" and node1.output[0] == node0.input[0]:
                            if "self_attn.pos_bias_u" in node0.input[1]:
                                # Swap Add and Reshape: the Add now consumes
                                # the pre-reshape tensor, and the Reshape's
                                # output is wired into the node five slots
                                # after the Add.
                                graph.node[j - 1].output[0] = node0.input[0]
                                node0.output[0] = node1.input[0]
                                node1.output[0] = graph.node[i + 5].input[0]

    # Add shape=[4, 64, -1] to Add and Transpose
    for i, node0 in enumerate(graph.node):
        if node0.op_type == "Add" and "self_attn.pos_bias_v" in node0.input[1]:
            num = node0.input[1].split(".")[2]
            # insert a Reshape([4, 64, -1]) after the pos_bias_v Add and feed
            # its output to the node five slots later
            reshape_node = add_new_reshape_node(num, node0, model)
            graph.node[i + 5].input[0] = reshape_node.output[0]

    for i, node0 in enumerate(graph.node):
        if node0.op_type == "Transpose":
            # if (int(node0.name.split("_")[-1]) - 307) % 201 == 0:
            if "self_attn/Transpose_3" in node0.name:
                # drop this Transpose and bridge its neighbours around it
                rmv_list.append(node0)
                graph.node[i - 10].output[0] = graph.node[i + 1].input[1]
                # graph.node[i + 1].output[0] = graph.node[i + 4].input[0]
                graph.node[i + 1].output[0] = graph.node[i + 5].input[0]
            if "new" in node0.name:
                rmv_list.append(node0)
            # if (int(node0.name.split("_")[-1]) - 294) % 201 == 0:
            # matches ".../self_attn/Transpose" with no "_" anywhere in the
            # full node name (i.e. excludes the numbered Transpose_N variants)
            if "self_attn/Transpose" in node0.name and "_" not in node0.name:
                rmv_list.append(node0)
                # graph.node[i - 1].output[0] = graph.node[i + 14].input[1]
                # graph.node[i + 14].output[0] = graph.node[i + 15].input[0]
                graph.node[i - 1].output[0] = graph.node[i + 15].input[1]
                graph.node[i + 15].output[0] = graph.node[i + 16].input[0]
            # if (int(node0.name.split("_")[-1]) - 378) % 201 == 0:
            # NOTE(review): `"..." in node0.name == 0` is a Python chained
            # comparison: ("self_attn/Transpose_3" in node0.name) and
            # (node0.name == 0).  node0.name is a str, so this is always
            # False and the branch below is DEAD CODE.  A different condition
            # was probably intended (see the commented-out modulo check
            # above) — confirm before relying on this pass.
            if "self_attn/Transpose_3" in node0.name == 0:
                rmv_list.append(node0)
                graph.node[i - 1].output[0] = graph.node[i + 1].input[0]

    # for i, node0 in enumerate(graph.node):
    #     if node0.op_type == "MatMul" and "query" in node0.input[0]:
    #         swap_input(node0)

    # Add shape=[4, 64, -1] to Reshape 
    for i, node0 in enumerate(graph.node):
        # if node0.op_type == "Reshape" and ((int(node0.name.split("_")[-1]) - 281) % 201 == 0 or (int(node0.name.split("_")[-1]) - 287) % 201 == 0 \
        #     or (int(node0.name.split("_")[-1]) - 293) % 201 == 0):
        if node0.op_type == "Reshape" and (node0.name.endswith("self_attn/Reshape") or node0.name.endswith("self_attn/Reshape_1") \
            or node0.name.endswith("self_attn/Reshape_2")):
            # replace this Reshape's shape Constant with a fixed [4, 64, -1]
            for j, node1 in enumerate(graph.node):
                if node1.op_type == "Constant" and node1.output[0] == node0.input[1]:
                    data_array = np.array([4, 64, -1]).astype(np.int64)
                    value = onnx.helper.make_tensor(name='value', data_type=onnx.TensorProto.INT64, dims=data_array.shape, vals=data_array)
                    new_scale_node = onnx.helper.make_node(op_type='Constant', inputs=[], outputs=[node1.output[0]], value=value)
                    graph.node.remove(node1)
                    graph.node.append(new_scale_node) 

    # # pwc2linear: remove transpose and swap matmul inputs
    # for i, node0 in enumerate(graph.node): 
    #     if node0.op_type == "Transpose" and "pwc2linear" in node0.input[0] and node0.input[0].split('_')[-1] == "1":
    #         rmv_list.append(node0)
    #         for j, node1 in enumerate(graph.node):
    #             if node1.op_type == "Add" and "pwc2linear" in node1.name and node1.output[0] == node0.input[0]:
    #                 node1.output[0] = graph.node[i + 1].input[0]
        
    #     if node0.op_type == "Transpose" and "pwc2linear" in node0.output[0] and node0.output[0].split('_')[-1] == "2":
    #         rmv_list.append(node0)
    #         for j, node1 in enumerate(graph.node):
    #             if node1.op_type == "MatMul" and "pwc2linear" in node1.name and node1.input[0] == node0.output[0]:
    #                 node1.input[0] = graph.node[i - 1].output[0]

    # # Add BRMatTAdd
    # for i, node0 in enumerate(graph.node): 
    #     # if node0.op_type == "MatMul" and "query" in node0.input[0] or node0.op_type == "MatMul" and "pwc2linear" in node0.name:
    #     if node0.op_type == "MatMul" and "norm_mha/LayerNormalization" in node0.input[0] or node0.op_type == "MatMul" and "pwc2linear" in node0.name:
    #         if node0.name.split("_")[-1] == "2":
    #             continue
    #         # swap_input(node0)
    #         add_node = graph.node[i + 1]
    #         # if "query" in node0.input[0]:
    #         if "norm_mha/LayerNormalization" in node0.input[0]:
    #             bias = add_node.input[0]
    #         else:
    #             bias = add_node.input[1]
    #         # MatMulT_node = onnx.helper.make_node('BRMatTAdd', name=f"{node0.name}_add", inputs=[node0.input[1], node0.input[0], bias], outputs=[add_node.output[0]])
    #         MatMulT_node = onnx.helper.make_node('TransposeMatmulAdd', name=f"{node0.name}_add", inputs=[node0.input[1], node0.input[0], bias], outputs=[add_node.output[0]])
    #         model.graph.node.remove(node0)
    #         model.graph.node.remove(add_node)
    #         model.graph.node.append(MatMulT_node)

    # Rebuild graph.node without the nodes collected in rmv_list.
    # NOTE(review): `node not in rmv_list` compares deep copies against the
    # original (possibly since-mutated) node objects via protobuf value
    # equality — confirm the collected nodes still match at this point.
    nodes = copy.deepcopy(graph.node)
    del graph.node[:]
    for node in nodes:
        if node not in rmv_list:
            graph.node.append(node)

# NOTE(review): this cleanup runs unconditionally at import time, yet
# biaslist_u / biaslist_v are only defined under the __main__ guard above —
# importing this file as a module would raise NameError. TODO confirm intent.

# Drop the original pos_bias initializers: they were re-emitted as Constant
# nodes by the rewrite pass above.  Iterate over a snapshot — removing from
# a protobuf repeated field while iterating it skips every other element.
for t in list(model.graph.initializer):
    if t.name in biaslist_u:
        model.graph.initializer.remove(t)

for t in list(model.graph.initializer):
    if t.name in biaslist_v:
        model.graph.initializer.remove(t)

print("=====================")
# Sanity check: prints nothing if every bias initializer was removed.
for t in model.graph.initializer:
    if t.name in biaslist_u or t.name in biaslist_v:
        print(t.name)

onnx.save(model, const_onnx)
        