import os
import torch

class CheckPointer:
    """Saves and restores model/optimizer/scheduler state dicts.

    A small index file (``last_checkpoint.txt``) inside ``checkpoint_dir``
    records the filename of the most recently saved checkpoint, so training
    can resume without knowing the exact checkpoint filename.
    """

    def __init__(self, checkpoint_dir, device=None):
        # Directory where checkpoint files and the index file are written.
        self.dir = checkpoint_dir
        self.index_filename = 'last_checkpoint.txt'
        # Forwarded to torch.load as map_location (e.g. 'cpu'); None keeps
        # tensors on whatever device they were saved from.
        self.device = device

    @property
    def index_path(self):
        """Full path of the index file that names the latest checkpoint."""
        return os.path.join(self.dir, self.index_filename)

    @property
    def last_checkpoint_path(self):
        """Full path of the most recently saved checkpoint.

        Raises:
            FileNotFoundError: if the index file does not exist.
        """
        if not os.path.exists(self.index_path):
            raise FileNotFoundError(self.index_path)
        with open(self.index_path, 'r', encoding='utf-8') as f:
            last_checkpoint_name = f.readline().strip()

        return os.path.join(self.dir, last_checkpoint_name)

    def exists(self):
        """Return True if both the index file and the checkpoint it names exist."""
        return os.path.exists(self.index_path) and os.path.exists(self.last_checkpoint_path)

    @staticmethod
    def _unwrap(model):
        # DataParallel and DistributedDataParallel wrap the real model in
        # `.module`; unwrap it so state-dict keys carry no 'module.' prefix.
        if isinstance(model, (torch.nn.DataParallel,
                              torch.nn.parallel.DistributedDataParallel)):
            return model.module
        return model

    def load(self, model, optimizer=None, scheduler=None, path=None):
        """Load a checkpoint into ``model`` (and optionally optimizer/scheduler).

        Args:
            model: module to restore; (Distributed)DataParallel wrappers are
                unwrapped automatically.
            optimizer: restored only if given AND present in the checkpoint.
            scheduler: restored only if given AND present in the checkpoint.
            path: explicit checkpoint file; defaults to the latest checkpoint.

        Returns:
            int: iteration to resume from (saved iteration + 1, or 0 when
            the checkpoint carries no iteration counter).

        Raises:
            FileNotFoundError: if no checkpoint can be found.
        """
        if path is None:
            if not self.exists():
                raise FileNotFoundError("checkpoint does not exist")
            path = self.last_checkpoint_path
        elif not os.path.exists(path):
            raise FileNotFoundError("checkpoint not found: {}".format(path))

        state_dict = torch.load(path, map_location=self.device)

        self._unwrap(model).load_state_dict(state_dict['model'])

        if optimizer is not None and "optimizer" in state_dict:
            optimizer.load_state_dict(state_dict["optimizer"])
        if scheduler is not None and "scheduler" in state_dict:
            scheduler.load_state_dict(state_dict["scheduler"])
        # Resume from the step AFTER the saved one; -1 + 1 == 0 when the
        # checkpoint stores no iteration.
        return state_dict.get('iteration', -1) + 1

    def save(self, model, optimizer=None, scheduler=None, iteration=None, checkpoint_name=None):
        """Write a checkpoint and (by default) update the index file.

        Args:
            model: module to save; (Distributed)DataParallel wrappers are
                unwrapped first so keys carry no 'module.' prefix.
            optimizer: included in the checkpoint when given.
            scheduler: included in the checkpoint when given.
            iteration: training step; stored in the checkpoint and used to
                build the default filename ``model_XXXXXXXX.pth``.
            checkpoint_name: explicit filename; when given, the index file
                is NOT updated (the checkpoint does not become the default
                resume target).

        Raises:
            ValueError: if both ``iteration`` and ``checkpoint_name`` are None.
        """
        if checkpoint_name is None:
            if iteration is None:
                # '{:0>8}'.format(None) raises a cryptic TypeError; fail clearly.
                raise ValueError("save() requires either iteration or checkpoint_name")
            last_checkpoint_name = 'model_{:0>8}.pth'.format(iteration)
        else:
            last_checkpoint_name = checkpoint_name
        last_checkpoint_path = os.path.join(self.dir, last_checkpoint_name)

        state_dict = {"model": self._unwrap(model).state_dict()}

        if optimizer is not None:
            state_dict["optimizer"] = optimizer.state_dict()

        if scheduler is not None:
            state_dict["scheduler"] = scheduler.state_dict()

        if iteration is not None:
            state_dict["iteration"] = iteration

        os.makedirs(self.dir, exist_ok=True)
        torch.save(state_dict, last_checkpoint_path)

        # Only default-named checkpoints become the "latest" resume point.
        if checkpoint_name is None:
            with open(self.index_path, 'w', encoding='utf-8') as f:
                f.write(last_checkpoint_name)