import os
import copy
import onnx

from configure import logger, level, msg_fmt, __LINE__, __FUNC__

# Use this script's own file name as the logger channel so that log lines
# emitted below can be traced back to this module.
file_name = os.path.basename(__file__)
logger.set_level(level=level, name=file_name)

def rmnodes_saveonnx(model, rm_nodes, msg, save_onnx):
    """Remove the given nodes from *model*'s graph and optionally save it.

    Args:
        model: onnx.ModelProto whose graph is modified in place.
        rm_nodes: container of NodeProto to drop; membership is tested with
            protobuf value equality (`node not in rm_nodes`), same as before.
        msg: text logged once the nodes have been removed.
        save_onnx: output path; the model is saved only when this is a
            non-None string ending in ".onnx" (case-insensitive).
    """
    # Snapshot the survivors, then rebuild the repeated field in place.
    # extend() copies each message back into the graph, so a full
    # deepcopy of every node (as the original did) is unnecessary.
    kept = [node for node in model.graph.node if node not in rm_nodes]
    del model.graph.node[:]
    model.graph.node.extend(kept)

    logger.log(msg_fmt.format(__LINE__, __FUNC__, msg), level=level)

    # str.endswith() is False for an empty string, so no separate
    # length check is needed.
    if save_onnx is not None and save_onnx.lower().endswith(".onnx"):
        onnx.save(model, save_onnx)
        msg = "save {}".format(save_onnx)
        logger.log(msg_fmt.format(__LINE__, __FUNC__, msg), level=level)
    

def traverse_mode(onnx_path):
    """Dump a model's inputs, outputs and nodes to a text report.

    Writes "<model-basename>.txt" into the current working directory,
    one line per graph input, per graph output, and per node
    (index, op_type, name), overwriting any previous report.

    Args:
        onnx_path: path of the ONNX model file to inspect.
    """
    onnx_model = onnx.load(onnx_path)

    txt_file = os.path.splitext(os.path.basename(onnx_path))[0] + ".txt"

    # Mode "w" truncates an existing report, so there is no need to
    # os.remove() the file first and then append, as the original did.
    with open(txt_file, "w", encoding="utf-8") as txt:
        for i, item in enumerate(onnx_model.graph.input):
            txt.write("input  {:<6} {:<22} {} \n".format(i, " ", item.name))
        for i, item in enumerate(onnx_model.graph.output):
            txt.write("output {:<6} {:<22} {} \n".format(i, " ", item.name))
        for i, node in enumerate(onnx_model.graph.node):
            txt.write("###### {:<6} {:<22} {} \n".format(i, node.op_type, node.name))

if __name__ == '__main__':
    
    # Input model (absolute Windows path) plus the expected file names of
    # intermediate models produced by other fusion/optimization steps.
    # Only the first model is inspected here; the remaining names are kept
    # for the commented-out traverse_mode() calls below.
    onnx_path = "D:\\vbox\\encoder_shapeinfer.onnx"
    fused_onnx = "encoder_fused.onnx"
    rmlen_onnx = "encoder_fused_rmlength.onnx"
    static_onnx = "encoder_shapeinfer_fusednoGLU_static.onnx"
    final_onnx = "encoder_shapeinfer_final.onnx"
    inputmask_onnx = "encoder_shapeinfer_fusednoGLU_static_inputmask.onnx"
    traverse_mode(onnx_path)
    # traverse_mode(fused_onnx)
    # traverse_mode(rmlen_onnx)