"""
Export onnx of standard model 
"""

import sys
sys.path.append('./')

import argparse
import shutil
import time
import yaml
import json
from easydict import EasyDict as edict
import logging

import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import onnx
from onnx import shape_inference

from utils import *

# Command-line interface for the export script.
# BUGFIX: description said 'Training' (copy-paste from the training script).
parser = argparse.ArgumentParser(description='Export ONNX of standard model')
parser.add_argument('config', type=str, help='config file path')
# BUGFIX: help text was a copy-paste of --resume's ('ckpt file path').
parser.add_argument('--gpu', type=int, default=0, help='gpu id to export on')
parser.add_argument('--resume', type=str, help='ckpt file path')
parser.add_argument('--name', type=str, required=True, help='experiment name')

# Whether to embed the trained weights inside the exported ONNX graph
# (torch.onnx.export's export_params flag).
EXPORT_PARAMS = True

# Module-level mutable state shared across the script (populated in main()).
gvar = edict({
    'args': None,      # experiment config loaded from the YAML file
    'cmd_args': None,  # parsed command-line arguments

    'saver': None,     # Saver managing the experiment output directory
})

def main():
    """Build the model from config, optionally restore a checkpoint, and export it to ONNX.

    Side effects: creates an experiment directory via Saver, writes log.txt,
    a tb_logs subdirectory, and model.onnx inside it.
    """
    cmd_args = parser.parse_args()
    cmd_args = edict(cmd_args.__dict__)

    # Load the YAML experiment config; the with-block closes the file handle
    # deterministically (the original open() leaked it).
    with open(cmd_args.config) as f:
        args = yaml.load(f, yaml.Loader)
    args = edict(args)

    gvar.saver = Saver(cmd_args.name)
    set_logger(gvar.saver.save_dir / 'log.txt')
    logging.info("cmd_args: " + json.dumps(cmd_args, indent=4, sort_keys=True))
    logging.info("args: " + json.dumps(args, indent=4, sort_keys=True))

    tb_log_dir = gvar.saver.save_dir / 'tb_logs'
    tb_log_dir.mkdir()

    gvar.args = args
    gvar.cmd_args = cmd_args
    if args.seed is not None:
        set_seeds(args.seed)

    # Create the model in fp16 on the requested GPU; the dummy input fixes the
    # exported graph's input shape to a single 3x224x224 image (ImageNet-style).
    model = build_model(args.model)
    model.cuda(gvar.cmd_args.gpu).half()
    dummy_input = torch.randn([1, 3, 224, 224]).cuda(gvar.cmd_args.gpu).half()

    # Optionally resume from a checkpoint.
    if cmd_args.resume is not None:
        logging.info(f'resuming from ckpt: {cmd_args.resume}')
        # BUGFIX: the CLI defines a single --gpu int; `cmd_args.gpus[0]` raised
        # AttributeError on every --resume run.
        best_acc1, epoch, cur_iter = \
            resume_from_ckpt(model, None, cmd_args.resume, device=torch.device(f'cuda:{cmd_args.gpu}'))
        logging.info(f'best_acc1: {best_acc1}, epoch: {epoch}, cur_iter: {cur_iter}')

    out_file = gvar.saver.save_dir / "model.onnx"
    logging.info(f'saving onnx to {str(out_file)}')
    torch.onnx.export(
        model,
        (dummy_input,),
        str(out_file),
        export_params=EXPORT_PARAMS)

    # Re-save the model with inferred intermediate tensor shapes so downstream
    # tools (e.g. graph viewers, optimizers) see full shape information.
    # str() for compatibility with onnx versions that reject Path objects.
    onnx.save(shape_inference.infer_shapes(onnx.load(str(out_file))), str(out_file))

# Run the export only when invoked as a script, not on import.
if __name__ == "__main__":
    main()