import onnxruntime as ort
import numpy as np
import argparse
import onnx
from google.protobuf.json_format import MessageToDict, MessageToJson
import json
from collections import OrderedDict


def onnx_dtype_to_np_dtype(dtype_idx):
    """Map an ONNX TensorProto elem_type index to the matching NumPy dtype.

    Args:
        dtype_idx: integer elem_type value from the ONNX model
            (values follow the onnx.TensorProto.DataType enum, e.g. 1 == FLOAT).

    Returns:
        The corresponding NumPy dtype class (plain ``bool`` for ONNX BOOL).

    Exits the process with status 1 on an unsupported index, preserving the
    script's original CLI error behavior instead of raising.
    """
    # Keys are onnx.TensorProto.DataType enum values.
    dtype_map = {
        1: np.float32,
        3: np.int8,
        6: np.int32,
        7: np.int64,
        9: bool,
        10: np.float16,
        12: np.uint32,
        13: np.uint64,
    }
    try:
        return dtype_map[dtype_idx]
    except KeyError:
        # Fixed typo ("unsupoorted"); keep print + exit so CLI behavior is unchanged.
        print("ERROR! unsupported dtype: " + str(dtype_idx))
        exit(1)


def get_input_shape(model, index):
    """Return the static shape of graph input *index* as a list of ints.

    Assumes every dim carries a concrete "dimValue" (dynamic dims with only
    "dimParam" would raise KeyError — same as the original behavior).
    """
    info = MessageToDict(model.graph.input[index])
    dims = info["type"]["tensorType"]["shape"]["dim"]
    return [int(d["dimValue"]) for d in dims]


def get_input_dtype(model, index):
    """Return the ONNX elem_type index of graph input *index*."""
    info = MessageToDict(model.graph.input[index])
    return info["type"]["tensorType"]["elemType"]


def get_output_shape(model, index):
    """Return the static shape of graph output *index* as a list of ints.

    Like get_input_shape, expects concrete "dimValue" entries for every dim.
    """
    info = MessageToDict(model.graph.output[index])
    dims = info["type"]["tensorType"]["shape"]["dim"]
    return [int(d["dimValue"]) for d in dims]


def get_output_dtype(model, index):
    """Return the ONNX elem_type index of graph output *index*."""
    info = MessageToDict(model.graph.output[index])
    return info["type"]["tensorType"]["elemType"]


def make_input_dtype(model, feed_inputs=None):
    """Build {input_name: elem_type index} for every real (feed) input.

    Args:
        model: loaded onnx ModelProto.
        feed_inputs: optional iterable of feed-input names to keep. Defaults
            to the module-level ``net_feed_input`` computed in ``__main__`` —
            kept for backward compatibility with the original implicit global
            dependency, but callers should pass it explicitly.

    Returns:
        dict mapping input name -> ONNX elem_type index.
    """
    if feed_inputs is None:
        # NOTE(review): relies on a global assigned in __main__; passing
        # feed_inputs explicitly avoids the hidden coupling.
        feed_inputs = net_feed_input
    res_dict = {}
    for index, _input in enumerate(model.graph.input):
        input_name = MessageToDict(_input)["name"]
        if input_name in feed_inputs:
            res_dict[input_name] = get_input_dtype(model, index)
    return res_dict


def make_input_shape(model, user_input_shapes, feed_inputs=None):
    """Build {input_name: shape list} for every real (feed) input.

    Args:
        model: loaded onnx ModelProto.
        user_input_shapes: list of shape lists supplied on the command line
            (one per feed input, in graph order); empty list means "use the
            shapes stored in the model".
        feed_inputs: optional iterable of feed-input names to keep. Defaults
            to the module-level ``net_feed_input`` computed in ``__main__``
            (backward-compatible with the original implicit global).

    Returns:
        dict mapping input name -> shape (list of ints).
    """
    if feed_inputs is None:
        # NOTE(review): relies on a global assigned in __main__.
        feed_inputs = net_feed_input
    res_dict = {}
    # Bug fix: the original indexed user_input_shapes with the raw
    # graph.input position. When initializers appear among graph inputs
    # (older opsets), that position skips ahead of the user-supplied list.
    # Count feed inputs separately so user shapes stay aligned.
    feed_idx = 0
    for index, _input in enumerate(model.graph.input):
        input_name = MessageToDict(_input)["name"]
        if input_name not in feed_inputs:
            continue
        if user_input_shapes:
            res_dict[input_name] = user_input_shapes[feed_idx]
        else:
            res_dict[input_name] = get_input_shape(model, index)
        feed_idx += 1
    return res_dict


if __name__ == "__main__":
    # CLI: load an ONNX model, run one inference with random (or file-supplied)
    # inputs, and dump inputs/outputs as raw .bin files (optionally per-layer .npy).
    parser = argparse.ArgumentParser(description="Get ONNX model input/output info")
    parser.add_argument(
        "-m", "--model", required=True, nargs=1, type=str, help="ONNX file"
    )
    parser.add_argument(
        "-s",
        "--input_shapes",
        required=False,
        nargs="?",
        type=str,
        help="Dynamic input model shapes, e.g. 1,2,3,4:5,6,7,8",
        default="",
    )
    parser.add_argument(
        "-d",
        "--dump_path",
        required=False,
        nargs="?",
        type=str,
        help="Dump output for each layer in .npy to <dump_path>",
        default="",
    )
    parser.add_argument(
        "-i",
        "--in_data_file",
        required=False,
        nargs="?",
        type=str,
        help="Benchmark input data files, e.g. input0.bin,input1.bin",
        default="",
    )
    args = parser.parse_args()
    # nargs=1 makes args.model a one-element list.
    source_file = args.model[0]

    # Optional raw input files, one per feed input, comma-separated.
    input_files = []
    if args.in_data_file != "":
        input_files = args.in_data_file.split(",")

    print("Getting input/output info")
    model = onnx.load(source_file)
    output_names = [node.name for node in model.graph.output]

    # Real "feed" inputs are graph inputs that are not initializers
    # (weights can appear in graph.input on older opsets).
    # NOTE: this global is read by make_input_shape / make_input_dtype.
    input_all = [node.name for node in model.graph.input]
    input_initializer = [node.name for node in model.graph.initializer]
    net_feed_input = list(set(input_all) - set(input_initializer))

    # Parse "-s 1,2,3,4:5,6,7,8" into [[1,2,3,4],[5,6,7,8]]; colon separates
    # inputs, commas separate dims.
    new_shape = []
    is_dynamic_input = False
    if args.input_shapes != "":
        is_dynamic_input = True
        for s in args.input_shapes.split(":"):
            new_shape.append([int(s) for s in s.split(",")])

    # Per-layer dump mode: expose every node output as a graph output so the
    # session returns all intermediate tensors.
    dump_path = ""
    if args.dump_path != "":
        dump_path = args.dump_path
        for node in model.graph.node:
            for output in node.output:
                model.graph.output.extend([onnx.ValueInfoProto(name=output)])
        output_names = [node.name for node in model.graph.output]

    # Resolve per-input shapes (user-supplied or from the model) and dtypes.
    make_input_res = make_input_shape(model, new_shape)
    make_input_dtype_res = make_input_dtype(model)

    model_path = source_file
    model_path_name = source_file.split("/")[-1]
    print("Start running inference for..." + model_path_name)
    # Serialize (includes any extra outputs added above) rather than loading
    # from disk again.
    session = ort.InferenceSession(model.SerializeToString())

    # Build the feed dict: random data by default, or raw files via -i.
    # Every input used is also saved to <model>_in.bin_<idx> for reproducibility.
    infer_input_dict = {}
    idx = 0
    for key in make_input_res:
        input_dat = []
        shape_dim = make_input_res[key]
        dtype_num = make_input_dtype_res[key]
        input_dat = np.random.rand(*shape_dim).astype(dtype=onnx_dtype_to_np_dtype(dtype_idx=dtype_num))
        if (len(input_files) != 0):
            # NOTE(review): assumes input_files order matches dict iteration
            # order of make_input_res — verify when inputs > 1.
            input_dat = np.fromfile(input_files[idx], dtype=onnx_dtype_to_np_dtype(dtype_idx=dtype_num)).reshape(*shape_dim)
        infer_input_dict[key] = ort.OrtValue.ortvalue_from_numpy(input_dat)
        input_bin_name = model_path_name + "_in.bin_" + str(idx)
        input_dat.tofile(input_bin_name)
        print("input %d has been saved to %s" % (idx, input_bin_name))
        idx += 1

    # Run inference
    outputs = session.run(output_names, infer_input_dict)

    # Dump mode: write one .npy per (possibly intermediate) output, then exit.
    if dump_path != "":
        out_dict = OrderedDict(zip(output_names, outputs))
        idx = 0
        for key in out_dict:
            # '/' is common in ONNX tensor names but invalid in file names.
            mod_key = key.replace('/', '_')
            npy_name = str(idx) + "_" + mod_key + ".npy"
            npy = out_dict[key]
            np.save(dump_path + "/" + npy_name, npy)
            idx += 1

        print("Run infer with data dump success!")
        exit(0)

    if is_dynamic_input or len(input_files) != 0:
        # Dynamic shapes / external data: model metadata may not describe the
        # actual output shape, so just save raw binaries.
        for idx, out in enumerate(outputs):
            outputs_np = np.array(out)
            output_bin_name = model_path_name + "_out.bin_" + str(idx)
            outputs_np.tofile(output_bin_name)
            print("output %d has been saved to %s" % (idx, output_bin_name))
    else:
        # Static case: also write a text summary — per output, one line with
        # "<name> <rank> <dims...>" followed by a line of flattened values.
        output_file_name = model_path_name + "_out.txt"
        with open(output_file_name, "w") as f:
            for idx, out in enumerate(outputs):
                infer_output_list = get_output_shape(model, idx)
                outputs_np = np.array(out).reshape(tuple(infer_output_list))
                output_bin_name = model_path_name + "_out.bin_" + str(idx)
                outputs_np.tofile(output_bin_name)
                print("output %d has been saved to %s" % (idx, output_bin_name))
                f.write(output_names[idx] + " ")
                f.write(str(len(tuple(infer_output_list))) + " ")
                for i in range(len(tuple(infer_output_list))):
                    f.write(str(infer_output_list[i]) + " ")
                f.write("\n")
                for i in outputs_np.flatten().tolist():
                    f.write(str(i) + " ")
                f.write("\n")
        print("output txt has been saved to: " + output_file_name)

    print("Run infer success!")
