import torch


def reshape_patch_size(input_Tensor: torch.Tensor, patch_size: int) -> torch.Tensor:
    """Fold non-overlapping ``patch_size x patch_size`` spatial patches into channels.

    Args:
        input_Tensor: 5-D tensor of shape ``(batch, seq, channel, height, width)``.
        patch_size: Side length of the square patch; must evenly divide both
            ``height`` and ``width``.

    Returns:
        Tensor of shape
        ``(batch, seq, channel * patch_size**2, height // patch_size, width // patch_size)``.

    Raises:
        AssertionError: If the input is not 5-D, or the spatial dimensions are
            not divisible by ``patch_size``.
    """
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`,
    # which would silently skip this input validation.
    if input_Tensor.ndim != 5:
        raise AssertionError("expected a 5-D (batch, seq, channel, height, width) tensor")

    batch, seq, channel, height, width = input_Tensor.shape
    if height % patch_size != 0 or width % patch_size != 0:
        raise AssertionError("裁剪尺寸不正确")

    # Split each spatial dim into (blocks, patch) pairs, move the two in-patch
    # axes next to the channel axis, then merge them into the channel dim.
    out = input_Tensor.reshape(
        batch, seq, channel,
        height // patch_size, patch_size,
        width // patch_size, patch_size,
    )
    out = out.permute(0, 1, 2, 4, 6, 3, 5)
    return out.reshape(
        batch, seq,
        channel * patch_size * patch_size,
        height // patch_size, width // patch_size,
    )


def reshape_patch_back(input_Tensor: torch.Tensor, patch_size: int) -> torch.Tensor:
    """Unfold patch channels back into spatial dimensions (inverse of the fold).

    Args:
        input_Tensor: 5-D tensor of shape ``(batch, seq, channel, height, width)``
            whose channel dim holds ``patch_size**2`` folded sub-pixels per
            original channel.
        patch_size: Side length of the square patch that was folded in;
            ``channel`` must be divisible by ``patch_size**2``.

    Returns:
        Tensor of shape
        ``(batch, seq, channel // patch_size**2, height * patch_size, width * patch_size)``.

    Raises:
        AssertionError: If the input is not 5-D, or ``channel`` is not
            divisible by ``patch_size**2``.
    """
    # Explicit raise instead of `assert`: asserts are stripped under `python -O`,
    # which would silently skip this input validation.
    if input_Tensor.ndim != 5:
        raise AssertionError("expected a 5-D (batch, seq, channel, height, width) tensor")

    batch, seq, channel, height, width = input_Tensor.shape
    if channel % (patch_size * patch_size) != 0:
        raise AssertionError("裁剪尺寸不正确")

    # Split the channel dim into (orig_channel, patch_h, patch_w), interleave
    # the patch axes back with the spatial block axes, then merge each pair.
    out = input_Tensor.reshape(
        batch, seq,
        channel // (patch_size * patch_size),
        patch_size, patch_size,
        height, width,
    )
    out = out.permute(0, 1, 2, 5, 3, 6, 4)
    return out.reshape(
        batch, seq,
        channel // (patch_size * patch_size),
        height * patch_size, width * patch_size,
    )
