import onnx.numpy_helper as numpy_helper
import onnx
import copy
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F

from onnx_files import onnx_path, fused_onnx, static_onnx, inputmask_onnx

# Path of the model to patch. NOTE(review): this literal is immediately
# overridden by the `fused_onnx` path imported from onnx_files below.
model_file = "ce_modify_210.onnx"
# model_file = "encoder_shapeinfer_fusednoGLU_static.onnx"
model_file = fused_onnx# static_onnx
model = onnx.load(model_file)
graph = model.graph

# verify inference result
# NOTE(review): feats / input_mask / feat_length are prepared here but never
# used later in this script — presumably leftovers from an inference check.
feats = np.random.random((1, 2048, 80))
input_mask = np.zeros((4, 511, 511))
feat_length = lengths = np.array([feats.shape[1]]).astype(np.int64)

# graph.input[0].type.tensor_type.shape.dim[1].dim_value = 2048
# Register three new graph inputs used as multiplicative masks at different
# points of the network (elem_type=1 is TensorProto.FLOAT).
input_mask_node = onnx.helper.make_tensor_value_info(name='input_mask_1', elem_type=1, shape=[1, 256, 511, 19])
graph.input.append(input_mask_node)

input_mask_node_2 = onnx.helper.make_tensor_value_info(name='input_mask_2', elem_type=1, shape=[1, 256, 511])
graph.input.append(input_mask_node_2)

input_mask_node_3 = onnx.helper.make_tensor_value_info(name='input_mask_3', elem_type=1, shape=[1, 511, 256])
graph.input.append(input_mask_node_3)

# Nodes collected here are excluded when the node list is rebuilt at the end.
rmvlist = []
## input_mask_1
# Insert a Mul against the new 'input_mask_1' graph input right after the
# '/embed/model/conv/conv.3/Relu' node, then rewire every node that consumed
# the Relu output (as its first input) to read the masked tensor instead.
for i, node0 in enumerate(graph.node):
    if node0.name == '/embed/model/conv/conv.3/Relu':
        add_node = onnx.helper.make_node(name='Cbr_mask', op_type='Mul', inputs=[node0.output[0], 'input_mask_1'], \
                                        outputs=['Cbr_mask_out'])
        model.graph.node.append(add_node)
        # The freshly appended 'Cbr_mask' node itself reads node0's output;
        # skip it by name so it does not end up consuming its own output.
        for j, node1 in enumerate(graph.node):
            if len(node1.input) > 0 and node1.input[0] == node0.output[0]:
                if node1.name == 'Cbr_mask':
                    continue
                node1.input[0] = add_node.output[0]
        
        
# add trans(deprecated)
# add relshift nodes
# Collect the per-layer self-attention Add nodes whose first input is the
# attention score MatMul output and whose second input is the rel-shift
# slice, together with both input names for later rewiring.
add_list = []
add_in_list_0 = []
add_in_list_1 = []
for cand in graph.node:
    if cand.op_type != 'Add':
        continue
    # earlier variant matched "self_attn/MatMul_1" as the second input
    if "self_attn/MatMul_output_0" in cand.input[0] and "self_attn/Slice_2" in cand.input[1]:
        add_list.append(cand)
        add_in_list_0.append(cand.input[0])
        add_in_list_1.append(cand.input[1])

def add_constant(model, name, value):
    """Append a Constant node to the graph holding *value* as an INT64 tensor.

    The node's single output is named *name* so downstream nodes can refer
    to the constant by that name.
    """
    tensor = onnx.helper.make_tensor(
        name=name, data_type=onnx.TensorProto.INT64, dims=value.shape, vals=value)
    const_node = onnx.helper.make_node(
        op_type='Constant', inputs=[], outputs=[name], value=tensor)
    model.graph.node.append(const_node)

def new_node_scr(model, matmul_node, length, i):
    """Append a Constant holding *length* plus a custom 'BRSCR' node that
    consumes *matmul_node*'s output and the length constant.

    Returns the created BRSCR node.

    Fix: the parameter was previously named ``len``, shadowing the builtin;
    renamed to ``length`` (all call sites in this script pass positionally).
    """
    len_np = np.array([length], dtype=np.int64)
    len_value = onnx.helper.make_tensor(name=f'feat_len_{i}', data_type=onnx.TensorProto.INT64, dims=len_np.shape, vals=len_np)
    len_node = onnx.helper.make_node(op_type='Constant', name=f'len_{i}', inputs=[], outputs=[f'feat_len_{i}'], value=len_value)
    model.graph.node.append(len_node)
    # 'BRSCR' is a custom (non-standard) op type; runtime must register it.
    scr_node = onnx.helper.make_node('BRSCR', name=f"BRSCR_{i}", inputs=[matmul_node.output[0], f'feat_len_{i}'], outputs=[f'scr_out_{i}'])
    model.graph.node.append(scr_node)

    return scr_node

### Transpose: from 4 dims to 3 dims
# for i in range(12):

# NOTE(review): perm=[0, 1, 2] is an identity permutation on a rank-3 tensor,
# so these Transpose nodes do not actually reorder axes despite the heading
# above — confirm whether a real permutation was intended.
# NOTE(review): each node's output is f'Trans_new_{i}' while its *name* is
# f'Trans_new_{2*i}' — a leftover of the paired-transpose scheme commented
# out below; verify this asymmetry is intentional.
for i in range(15):
    trans_i_0 = onnx.helper.make_node(op_type ='Transpose', inputs=[add_in_list_0[i]], outputs=[f'Trans_new_{i}'], name=f'Trans_new_{2*i}')
    # trans_i_1 = onnx.helper.make_node(op_type ='Transpose', inputs=[add_in_list_1[i]], outputs=[f'Trans_new_{2*i+1}'], name=f'Trans_new_{2*i+1}')
    new_attr = onnx.helper.make_attribute('perm', [0, 1, 2])
    trans_i_0.attribute.insert(0, new_attr)
    model.graph.node.append(trans_i_0)
    # Route the rel-shift Add's first input through the new Transpose.
    add_list[i].input[0] = trans_i_0.output[0]
    
    
matmul_nodes = []
slice_nodes = []
mm_index = []
slice_index = []

# matrixbd
# Gather the MatMul nodes whose output name contains 'self_attn/MatMul_1',
# together with their positions in the node list. (slice_nodes and
# slice_index are declared for symmetry but not populated here.)
for pos, cand in enumerate(graph.node):
    if cand.op_type != "MatMul":
        continue
    if "self_attn/MatMul_1" in cand.output[0]:
        matmul_nodes.append(cand)
        mm_index.append(pos)

def add_constant_2(model, name, value, type_flag, index):
    """Append a Constant node named *name* holding tensor *value*.

    type_flag 0 selects INT64, anything else FLOAT. *index* is accepted for
    call-site compatibility but is not used.
    """
    dtype = onnx.TensorProto.INT64 if type_flag == 0 else onnx.TensorProto.FLOAT
    tensor = onnx.helper.make_tensor(
        name=name, data_type=dtype, dims=value.shape, vals=value)
    const_node = onnx.helper.make_node(
        op_type='Constant', inputs=[], outputs=[name], value=tensor)
    model.graph.node.append(const_node)
    
    
def add_slice_node(model, i, matmul_node, index):
    """Insert a Slice node taking the top-left 130x130 block (axes 1 and 2)
    of *matmul_node*'s output.

    The four Slice parameters (starts/ends/axes/steps) are registered first
    as Constant nodes via add_constant_2; returns the created Slice node.
    """
    slice_params = [
        ('starts%d' % i, np.array([0, 0], dtype=np.int64), 1),
        ('ends%d' % i, np.array([130, 130], dtype=np.int64), 2),
        ('axes%d' % i, np.array([1, 2], dtype=np.int64), 3),
        ('steps%d' % i, np.array([1, 1], dtype=np.int64), 4),
    ]
    for pname, pvalue, offset in slice_params:
        add_constant_2(model, pname, pvalue, 0, index + offset)
    slice_node = onnx.helper.make_node(
        'Slice',
        name=f'Slice_relshift_{i}',
        inputs=[matmul_node.output[0]] + [p[0] for p in slice_params],
        outputs=['slice_relshift_out%d' % i])
    model.graph.node.append(slice_node)

    return slice_node

def add_concat_node(model, i, slice_node, index):
    """Pad *slice_node*'s output back to (4, 511, 511) with two zero blocks.

    First concatenates a (4, 130, 381) zero block on the last axis, then a
    (4, 381, 511) zero block on the second-to-last axis. Also rewires the
    global add_list[i] node's second input to the padded result, and returns
    the final Concat node.
    """
    zeros_right = np.zeros((4, 130, 381), dtype=np.float32)
    add_constant_2(model, f'concat_rels_input0_{i}', zeros_right, 1, index + 1)
    concat0 = onnx.helper.make_node(
        op_type='Concat',
        inputs=[slice_node.output[0], f'concat_rels_input0_{i}'],
        outputs=[f'concat_rels_out0_{i}'],
        axis=-1,
        name=f'concat_rels_out0_{i}')
    model.graph.node.append(concat0)

    zeros_below = np.zeros((4, 381, 511), dtype=np.float32)
    add_constant_2(model, f'concat_rels_input1_{i}', zeros_below, 1, index + 3)
    concat1 = onnx.helper.make_node(
        op_type='Concat',
        inputs=[f'concat_rels_out0_{i}', f'concat_rels_input1_{i}'],
        outputs=[f'concat_rels_out1_{i}'],
        axis=-2,
        name=f'concat_rels_out1_{i}')
    model.graph.node.append(concat1)
    add_list[i].input[1] = concat1.output[0]

    return concat1

# matrixbd
# for i, matmul_node in enumerate(matmul_nodes):
#     len = 130
#     scr_node = new_node_scr(model, matmul_node, len, i)
#     for j, node1 in enumerate(graph.node):
#         if node1.op_type == 'Add' and node1.input[1] == matmul_node.output[0]:
#             node1.input[1] = scr_node.output[0]
    
# pwc2linear
wnamelist = []
def add_linear(i, j):
    """Create Constant nodes for encoder layer *i*'s pointwise_conv{j} weight
    (squeezed and transposed so it can feed a MatMul) and bias, append them
    to the graph, and return the two Constant nodes.

    The weight initializer's name is recorded in the global wnamelist so the
    original initializer can be removed from the model afterwards.
    Raises KeyError if the expected initializers are not found.
    """
    w_name = f'model.encoders.{i}.conv_module.pointwise_conv{j}.weight'
    b_name = f'model.encoders.{i}.conv_module.pointwise_conv{j}.bias'
    value_dict = {}
    for init in model.graph.initializer:
        if init.name == b_name:
            value_dict[init.name] = numpy_helper.to_array(init)
        if init.name == w_name:
            value_dict[init.name] = numpy_helper.to_array(init)
            wnamelist.append(init.name)

    # Conv weight is (out, in, 1); drop the kernel dim and swap to (in, out).
    value_w = value_dict[w_name].squeeze().transpose((1, 0))
    print(value_w.shape)
    value_b = value_dict[b_name]
    weight_tensor = onnx.helper.make_tensor(name=f'{w_name}_new', data_type=onnx.TensorProto.FLOAT, dims=value_w.shape, vals=value_w)
    bias_tensor = onnx.helper.make_tensor(name=f'{b_name}_new', data_type=onnx.TensorProto.FLOAT, dims=value_b.shape, vals=value_b)
    const1 = onnx.helper.make_node(op_type='Constant', inputs=[], outputs=[w_name], value=weight_tensor, name=f'node_{w_name}')
    const2 = onnx.helper.make_node(op_type='Constant', inputs=[], outputs=[b_name], value=bias_tensor, name=f'node_{b_name}')
    model.graph.node.append(const1)
    model.graph.node.append(const2)
    return const1, const2

# for i in range(12):
# pwc2linear: for each of the 15 encoder layers, mark the layer's Conv nodes
# for removal and splice in MatMul+Add replacements for pointwise_conv1/2.
for i in range(15):
    klist = []
    # Collect graph indices of Conv nodes whose weight input name contains
    # f"encoders.{i}." — all of them are queued for removal from the final
    # graph. NOTE(review): the rewiring below assumes klist[0] and klist[1]
    # are the two pointwise convs and that neighbouring nodes sit at fixed
    # positional offsets (klist[0] - 2, klist[0] - 1, klist[1] + 1, ...);
    # this is fragile — confirm against the actual fused-graph topology.
    for k, node0 in enumerate(graph.node):
        if node0.op_type == 'Conv' and f"encoders.{i}." in node0.input[1]:
            klist.append(k)
            rmvlist.append(node0) 
    for j in range(1, 3):
        # Constants carrying the transposed conv weight and the bias.
        const1, const2 = add_linear(i, j)
        # Placeholder inputs/outputs 'A'/'B'/'Y'/'C' are overwritten below.
        new_matmul = onnx.helper.make_node('MatMul', inputs=['A', 'B'], outputs=['Y'], name=f'pwc2linear_matmul_{i}_{j}')
        new_add = onnx.helper.make_node('Add', inputs=['A', 'B'], outputs=['C'], name=f'pwc2linear_add_{i}_{j}')
        new_matmul.input[1] = const1.output[0]
        new_matmul.output[0] = f'pwc2linear_interm_{i}_{j}'
        new_add.input[0] = new_matmul.output[0]
        new_add.input[1] = const2.output[0]
        new_add.output[0] = f'pwc2linear_{i}_{j}'
        if j == 1:
            # Bypass the first pointwise conv: the node two slots before it
            # now produces the conv's former input, which feeds the MatMul;
            # the node just before the conv consumes the Add's output.
            graph.node[klist[0] - 2].output[0] = graph.node[klist[0]].input[0]
            new_matmul.input[0] = graph.node[klist[0] - 2].output[0]
            graph.node[klist[0] - 1].input[0] = new_add.output[0]
            graph.node[klist[0] - 1].output[0] = graph.node[klist[0] + 1].input[0]
        else:
            # Second pointwise conv: route the following node's input from
            # the preceding node, rename its output to feed the MatMul, and
            # hand the Add's output to the node three slots after the conv.
            graph.node[klist[1] + 1].input[0] = graph.node[klist[1] - 1].output[0]
            new_matmul.input[0] = f'pwc2linear_trans_{i}_{j}'
            graph.node[klist[1] + 1].output[0] = new_matmul.input[0]
            graph.node[klist[1] + 3].input[0] = new_add.output[0]
        print(new_matmul.input[0])
        model.graph.node.append(new_matmul)
        model.graph.node.append(new_add)

# Drop the original pointwise-conv weight initializers replaced above.
# Fix: iterate over a snapshot — removing from a protobuf repeated field
# while iterating it shifts the remaining elements and skips entries, so
# some initializers in wnamelist could survive.
for t in list(model.graph.initializer):
    if t.name in wnamelist:
        model.graph.initializer.remove(t)
        
        
# add mask before dwc
# After each conv_module Mul (GLU-style gating), multiply by 'input_mask_2'
# and feed the masked tensor into the following Conv (the depthwise conv).
for i, node0 in enumerate(graph.node):
    if node0.op_type == 'Mul' and node0.name not in ['Cbr_mask'] and 'conv_module/Mul' in node0.name:
    # if node0.op_type == 'Glu' and node0.name not in ['Cbr_mask'] and 'conv_module/Mul' in node0.output[0]:
        add_node = onnx.helper.make_node(name=f'Mul_new_mask_2_{i}', op_type='Mul', inputs=[node0.output[0], 'input_mask_2'], \
                                        outputs=[f'Mul_new_mask_2_out{i}'])
        model.graph.node.append(add_node)
        # Rewire only Conv consumers of the original Mul output.
        # NOTE(review): the name-skip below can never trigger — the appended
        # mask node is a Mul, not a Conv — it mirrors the pattern used in the
        # other mask-insertion loops.
        for j, node1 in enumerate(graph.node):
            if node1.op_type == 'Conv' and node1.input[0] == node0.output[0]:
                if node1.name == f'Mul_new_mask_2_{i}':
                    continue
                node1.input[0] = add_node.output[0]

# add mask after dwc
# After each layer's second pwc2linear Add (name ending in '_2'), multiply
# by 'input_mask_3' and rewire downstream Mul consumers to the masked value.
for i, node0 in enumerate(graph.node):
    if node0.op_type == 'Add' and 'pwc2linear_add'in node0.name and node0.name.split('_')[-1] == '2':
        add_node = onnx.helper.make_node(name=f'Mul_new_mask_3_{i}', op_type='Mul', inputs=[node0.output[0], 'input_mask_3'], \
                                        outputs=[f'Mul_new_mask_3_out{i}'])
        model.graph.node.append(add_node)
        # The freshly appended mask node is itself a Mul reading node0's
        # output, so it must be skipped by name to avoid a self-loop.
        for j, node1 in enumerate(graph.node):
            if node1.op_type == 'Mul' and node1.input[0] == node0.output[0]:
                if node1.name == f'Mul_new_mask_3_{i}':
                    continue
                node1.input[0] = add_node.output[0]

# add mask for encoder out
# Multiply the LayerNormalization output named "encoder_out" by
# 'input_mask_3', producing 'encoder_out_mask' (wired up as the new graph
# output at the end of the script).
for cand in graph.node:
    # earlier variant matched an Add node producing "encoder_out"
    if cand.op_type == 'LayerNormalization' and cand.output[0] == "encoder_out":
        mask_mul = onnx.helper.make_node(
            name=f'Mul_new_mask_4',
            op_type='Mul',
            inputs=[cand.output[0], 'input_mask_3'],
            outputs=['encoder_out_mask'])
        model.graph.node.append(mask_mul)

# Rebuild the node list without the Conv nodes queued in rmvlist and swap the
# graph output over to the masked encoder output.
nodes = copy.deepcopy(graph.node)
del graph.node[:]
# NOTE(review): assumes the graph has exactly one output at this point.
del graph.output[0]
model.graph.output.extend([onnx.ValueInfoProto(name='encoder_out_mask')])
# Relies on protobuf message value-equality: the deep-copied nodes still
# compare equal to the original objects stored in rmvlist.
for node in nodes:
    if node not in rmvlist:
        graph.node.append(node)

# onnx.save(model, "conformer_encoder_inf_210.onnx")
onnx.save(model, inputmask_onnx)
