import torch
import os
import shutil


def save_checkpoint(model, epoch_num, optimizer, config):
    """Save a training checkpoint (model weights, optionally optimizer state).

    Unwraps ``DataParallel``/``DistributedDataParallel`` so the saved
    state_dict keys carry no ``module.`` prefix and load into a plain model.
    The save itself is best-effort: an ``OSError`` is reported, not raised,
    so a full disk does not kill a training run.

    Args:
        model: model to save; may be DP/DDP-wrapped.
        epoch_num: epoch number, zero-padded to 3 digits in the filename.
        optimizer: saved only when ``config.save_optimizer`` is truthy.
        config: object with ``save_optimizer``, ``save_path``,
            ``checkpoint_prefix`` and ``rank`` attributes.
    """
    checkpoint_dict = {}
    # DP/DDP wrap the real model in `.module`; save the inner state_dict
    # so the checkpoint also loads into an unwrapped model.
    if isinstance(model, (torch.nn.DataParallel,
                          torch.nn.parallel.DistributedDataParallel)):
        checkpoint_dict['state_dict'] = model.module.state_dict()
    else:
        checkpoint_dict['state_dict'] = model.state_dict()
    if config.save_optimizer:
        checkpoint_dict['optimizer'] = optimizer.state_dict()
    # exist_ok avoids the race-prone exists()-then-makedirs pattern and
    # the former blanket `except Exception: pass`.
    os.makedirs(config.save_path, exist_ok=True)
    filename = os.path.join(
        config.save_path,
        '{0}_{1:03d}_{2}.pth.tar'.format(config.checkpoint_prefix,
                                         epoch_num, config.rank))
    print('save check point to {}'.format(filename))
    try:
        torch.save(checkpoint_dict, filename)
    except OSError as exc:
        # Best-effort: report (with the reason) and let training continue.
        print('save checkpoint failed: {}'.format(exc))
