

import os
import torch
from typing import Callable


def collate_fn(padding_tensor: torch.FloatTensor) -> Callable:
    """
    Build a ``collate_fn`` for ``torch.utils.data.DataLoader``.

    The human-readable date tensors in a batch may have different lengths,
    so each one is padded at the end (with copies of ``padding_tensor``)
    up to the longest sequence in the batch before stacking.

    :param padding_tensor: 1-D tensor used to pad the tail of each
        human-readable date tensor (one copy per missing time step).
    :return: a function suitable for the ``collate_fn`` argument of a
        ``DataLoader``; it maps a list of
        ``(human_tensor, machine_tensor, raw)`` triples to
        ``(batch_human, batch_machine, batch_raw)``.
    """

    def _collate_fn(batch):
        # Reshape the padding vector to a (features, 1) column so it can be
        # expanded to any number of padding time steps.
        pad_column = padding_tensor.view(-1, 1)
        human_seqs, machine_seqs, raw_items = zip(*batch)
        max_len = max(seq.shape[0] for seq in human_seqs)

        padded = []
        for seq in human_seqs:
            n_missing = max_len - seq.shape[0]
            # (features, n_missing) -> (n_missing, features); zero-copy view.
            filler = pad_column.expand(pad_column.shape[0], n_missing).transpose(0, 1)
            padded.append(torch.vstack((seq, filler)).unsqueeze(0))

        batch_human = torch.vstack(padded).float()
        batch_machine = torch.vstack([seq.unsqueeze(0) for seq in machine_seqs]).float()
        return batch_human, batch_machine, raw_items

    return _collate_fn


def find_last_checkpoint_epoch(checkpoint_dir: str) -> int:
    """
    Find the largest epoch number among the checkpoints in a directory.

    Checkpoint files are named ``ckpt_epoch_<NN>.pth`` (the scheme used by
    ``save_checkpoint``); any other file in the directory is ignored.

    :param checkpoint_dir: directory path where checkpoints are saved.
    :return: the largest epoch number found.
    :raises IOError: if no checkpoint file is found in the directory.
    """
    prefix, suffix = 'ckpt_epoch_', '.pth'
    epochs = []
    for name in os.listdir(checkpoint_dir):
        # BUG FIX: the previous `name.strip('ckpt_epoch_.pth')` removed a
        # *character set* from both ends, not a prefix/suffix, and raised
        # ValueError on any .pth file whose remaining stem was empty or
        # non-numeric (e.g. 'model.pth'). Parse the numeric stem explicitly.
        if name.startswith(prefix) and name.endswith(suffix):
            stem = name[len(prefix):len(name) - len(suffix)]
            if stem.isdigit():
                epochs.append(int(stem))
    if not epochs:
        raise IOError('No checkpoint found in {}'.format(checkpoint_dir))
    return max(epochs)


def save_checkpoint(checkpoint_dir: str, epoch: int, model: torch.nn.DataParallel,
                    optimizer: torch.optim.Adam = None) -> None:
    """
    Save a model to the checkpoint directory, tagged with its epoch.

    The file is written as ``ckpt_epoch_<NN>.pth`` and contains a dict with
    the keys ``'epoch'``, ``'model'`` (a state dict) and ``'optimizer'``
    (a state dict, or None when no optimizer is given).

    :param checkpoint_dir: directory path where checkpoints are saved.
    :param epoch: the epoch number of the model.
    :param model: the model to save; a DataParallel wrapper is unwrapped so
        the stored state dict has no ``module.`` key prefixes.
    :param optimizer: the optimizer used to train the model (optional).
    """
    if isinstance(model, torch.nn.DataParallel):
        model_state = model.module.state_dict()
    else:
        model_state = model.state_dict()
    checkpoint = {
        'epoch': epoch,
        'model': model_state,
        'optimizer': optimizer.state_dict() if optimizer is not None else None,
    }
    torch.save(checkpoint, os.path.join(checkpoint_dir, 'ckpt_epoch_%02d.pth' % epoch))


def load_checkpoint(checkpoint_dir: str, epoch: int = None) -> torch.nn.Module:
    """
    Load a checkpoint dict, defaulting to the latest epoch available.

    :param checkpoint_dir: directory path where checkpoints are saved.
    :param epoch: the epoch to load; when None, the largest epoch present
        in the directory is used.
    :return: the checkpoint dict produced by ``torch.save``.
    """
    target_epoch = find_last_checkpoint_epoch(checkpoint_dir) if epoch is None else epoch
    path = os.path.join(checkpoint_dir, 'ckpt_epoch_%02d.pth' % target_epoch)
    return torch.load(path)


def load_model(checkpoint_dir: str, epoch: int, model: torch.nn.Module) -> torch.nn.Module:
    """
    Restore model weights from the checkpoint directory (best-effort).

    On any failure the error is printed and the model is returned with its
    weights unchanged, so training/evaluation can proceed from scratch.

    :param checkpoint_dir: directory path where checkpoints are saved.
    :param epoch: the epoch number of the checkpoint to load.
    :param model: the freshly constructed model to load weights into.
    :return: the same model instance, with weights loaded when possible.
    """
    try:
        state = load_checkpoint(checkpoint_dir, epoch)['model']
        # Unwrap DataParallel so key names line up with the saved state dict.
        target = model.module if isinstance(model, torch.nn.DataParallel) else model
        target.load_state_dict(state)
    except Exception as e:
        print('Fail to load model: {}'.format(e))
    return model


def load_optimizer(checkpoint_dir: str, epoch: int, optimizer: torch.optim.Adam) -> torch.optim.Adam:
    """
    Restore optimizer state from the checkpoint directory (best-effort).

    On any failure the error is printed and the optimizer is returned with
    its state unchanged.

    :param checkpoint_dir: directory path where checkpoints are saved.
    :param epoch: the epoch number of the checkpoint to load.
    :param optimizer: the freshly constructed optimizer to load state into.
    :return: the same optimizer instance, with state loaded when possible.
    """
    try:
        state = load_checkpoint(checkpoint_dir, epoch)['optimizer']
        optimizer.load_state_dict(state)
    except Exception as e:
        print('Fail to load optimizer: {}'.format(e))
    return optimizer


if __name__ == '__main__':
    # Utility module only: nothing to run when executed as a script.
    pass