import os
import onnx
from onnx import helper
import onnxruntime as oxrt

def load_onnx_model(onnx_model_name):
    """Load an ONNX model from the ``model/`` subdirectory of the CWD.

    Args:
        onnx_model_name: File name of the model (e.g. ``"net.onnx"``).

    Returns:
        The deserialized ``onnx.ModelProto``.
    """
    # Models are resolved relative to the current working directory,
    # not the source file location (see commented-out alternative in history).
    model_path = os.path.join(os.getcwd(), 'model', onnx_model_name)
    return onnx.load(model_path)

def check_and_save_onnx_model(model_def, model_name, opset_version=11):
    """Stamp the opset version on *model_def*, validate it, and save it.

    The opset version is set BEFORE validation so that the model written
    to disk is exactly the one that passed ``onnx.checker.check_model``
    (the original code mutated the opset after checking, saving an
    unvalidated variant).

    Args:
        model_def: The ``onnx.ModelProto`` to validate and persist.
        model_name: File name to save under ``<cwd>/model/``.
        opset_version: Opset version to record on the model's first
            ``opset_import`` entry. Defaults to 11 (previous hard-coded value).

    Raises:
        onnx.checker.ValidationError: If the model fails validation.
    """
    # Fix the opset first so the checker sees the final model.
    model_def.opset_import[0].version = opset_version

    onnx.checker.check_model(model_def)
    print('The model is checked!')

    onnx_model_dir = os.path.join(os.getcwd(), 'model')
    # Ensure the target directory exists; onnx.save does not create it.
    os.makedirs(onnx_model_dir, exist_ok=True)
    onnx_model_path = os.path.join(onnx_model_dir, model_name)

    onnx.save(model_def, onnx_model_path)
    print("Save onnx model:{} OK".format(onnx_model_path))

def make_model(graph_def, model_name):
    """Wrap *graph_def* in a ModelProto, then validate and save it.

    Args:
        graph_def: The ``onnx.GraphProto`` to wrap.
        model_name: File name to save the resulting model under.
    """
    # Build the ModelProto and delegate validation + persistence.
    proto = helper.make_model(graph_def, producer_name='onnx-example')
    check_and_save_onnx_model(proto, model_name)

def create_onnx_session(model_name):
    """Create a single-threaded, fully-optimized onnxruntime session.

    The model is loaded from ``<cwd>/model/<model_name>``.

    Args:
        model_name: File name of the ONNX model to load.

    Returns:
        An ``onnxruntime.InferenceSession`` ready for inference.
    """
    model_path = os.path.join(os.getcwd(), 'model', model_name)

    # Configure the runtime: one intra-op thread, sequential execution,
    # and all graph optimizations enabled.
    options = oxrt.SessionOptions()
    options.intra_op_num_threads = 1
    options.execution_mode = oxrt.ExecutionMode.ORT_SEQUENTIAL
    options.graph_optimization_level = oxrt.GraphOptimizationLevel.ORT_ENABLE_ALL

    return oxrt.InferenceSession(model_path, sess_options=options)