import torch
import os

class CheckPoint(object):
    """Persist fine-tuning checkpoints (model, optimizer state, epoch) to disk."""

    def __init__(self, opt):
        # Directory where checkpoint files are written; opt must expose .save_path.
        self.save_path = opt.save_path
        # Mirror of the most recently saved checkpoint (None until save_model runs).
        self.checkpoint_params = {
            'model': None,
            'adam_opts': None,
            'resume_epoch': None
        }

    def save_model(self, epoch=None, model=None, adam_opts=None, best_flag=False):
        """Serialize the current training state to ``finetune_checkpoint.pkl``.

        Args:
            epoch: epoch number to resume from (stored as 'resume_epoch').
            model: model object or state to persist.
            adam_opts: optimizer state to persist alongside the model.
            best_flag: when True, additionally write the model alone to
                ``finetune_best_model.pkl``.
        """
        # makedirs handles nested directories and, with exist_ok=True, avoids
        # the race between the old isdir check and mkdir.
        os.makedirs(self.save_path, exist_ok=True)

        checkpoint_param = {
            'model': model,
            'adam_opts': adam_opts,
            'resume_epoch': epoch
        }
        # Keep the instance attribute in sync with what was written to disk
        # (previously it was initialized but never updated).
        self.checkpoint_params = checkpoint_param

        # os.path.join is correct whether or not save_path ends with a
        # separator; plain concatenation silently produced a wrong filename
        # (e.g. "ckptfinetune_checkpoint.pkl") when the trailing slash was missing.
        torch.save(checkpoint_param,
                   os.path.join(self.save_path, "finetune_checkpoint.pkl"))

        if best_flag:
            best_model = {'model': model}
            torch.save(best_model,
                       os.path.join(self.save_path, "finetune_best_model.pkl"))