import torch
import torch.nn as nn

import numpy as np

from einops import rearrange


class GridMaskLayer(nn.Module):
    """Random token masking for 5-D feature maps (masked-modeling pretraining).

    ``mask_token`` zeroes a random fraction of the z*h*w token positions of a
    ``[b, c, z, h, w]`` feature map (shared across batch and channels) and
    records the keep-mask.  ``get_masked_gt`` then takes the full-resolution
    target (each 2x2x2 voxel patch corresponding to one token) and keeps only
    the voxels of the *masked* tokens, i.e. the reconstruction targets.

    NOTE(review): ``use_dynamic_mask_ratio``, ``use_hierarchical_mask``,
    ``scale``, ``drop_ratio`` and ``step`` are stored but not used in this
    file — presumably consumed by a scheduler/trainer elsewhere; confirm
    before removing.
    """

    def __init__(self, use_dynamic_mask_ratio=False, use_hierarchical_mask=False, basic_mask_ratio=0.5,
                 scale=0.5, drop_ratio=0.3):
        super(GridMaskLayer, self).__init__()
        self.scale = scale
        self.drop_ratio = drop_ratio
        self.step = 0  # never advanced here — TODO confirm who increments it

        self.use_dynamic_mask_ratio = use_dynamic_mask_ratio
        self.use_hierarchical_mask = use_hierarchical_mask
        # Fraction of token positions to zero out in mask_token().
        self.basic_mask_ratio = basic_mask_ratio
        # Keep-masks produced by mask_token(); consumed (and cleared) by
        # get_masked_gt().  Only mask_list[0] is ever read, so interleaving
        # several forward passes before a get_masked_gt() discards the later
        # masks — NOTE(review): verify callers pair the two calls 1:1.
        self.mask_list = []

    def forward(self, x):
        """Apply random token masking to ``x`` ([b, c, z, h, w])."""
        return self.mask_token(x)

    def mask_token(self, x):
        """Zero a random ``basic_mask_ratio`` fraction of token positions.

        The same flat mask (1 = keep, 0 = masked) is applied to every batch
        element and channel, and is appended to ``self.mask_list`` so the
        matching ``get_masked_gt`` call can invert it.

        Returns a new tensor; the input is not modified in place.
        """
        b, c, z, h, w = x.size()
        num_tokens = z * h * w
        num_mask = int(num_tokens * self.basic_mask_ratio)

        # Build the shuffled keep-mask: num_mask zeros (masked), rest ones.
        keep = np.concatenate([np.zeros(num_mask),
                               np.ones(num_tokens - num_mask)])
        np.random.shuffle(keep)
        keep = torch.from_numpy(keep)

        # Flatten tokens, mask, and restore the spatial layout.
        # Bug fixes vs. the previous version:
        #  - there was a stray permute(1, 2, 0) before the reshape, which
        #    scrambled the batch/channel/token axes;
        #  - the float64 numpy mask was multiplied in place into the (typically
        #    float32) input, which raises a type-promotion RuntimeError, so the
        #    mask is cast to x's dtype first.
        flat = x.reshape(b, c, num_tokens)
        flat = flat * keep.to(device=x.device, dtype=x.dtype)
        x = flat.reshape(b, c, z, h, w)

        self.mask_list.append(keep)
        return x

    def get_masked_gt(self, x):
        """Return the ground truth restricted to the masked token positions.

        ``x`` is the full-resolution target, ``[b, c, d, h, w]`` with even
        spatial dims (e.g. ``[b, 1, 96, 96, 96]``); each 2x2x2 patch maps to
        one token, so ``(d//2)*(h//2)*(w//2)`` must equal the token count of
        the preceding ``mask_token`` call.  Voxels of kept tokens are zeroed,
        voxels of masked tokens pass through unchanged.  Consumes (clears)
        ``self.mask_list``.
        """
        b, c, d, h, w = x.size()
        d2, h2, w2 = d // 2, h // 2, w // 2

        # Group the volume into 2x2x2 patches, one patch per token:
        # [b, c, d, h, w] -> [b, c, d2*h2*w2 (tokens), 8 (voxels per patch)].
        x = x.view(b, c, d2, 2, h2, 2, w2, 2)
        x = x.permute(0, 1, 2, 4, 6, 3, 5, 7)
        x = x.reshape(b, c, d2 * h2 * w2, 8)

        keep = self.mask_list[0]
        # Invert the keep-mask: ground truth lives where tokens were masked.
        # view(-1, 1) broadcasts one mask value over the 8 voxels of a patch,
        # replacing the previous transpose/no-op-permute/transpose sequence
        # whose no-op permute(0, 1, 2, 3) left the patch axis in the wrong
        # place and scrambled the reshape below.
        x = x * (1. - keep).to(device=x.device, dtype=x.dtype).view(-1, 1)
        self.mask_list = []

        # Exact inverse of the grouping above: restore [b, c, d, h, w].
        x = x.reshape(b, c, d2, h2, w2, 2, 2, 2)
        x = x.permute(0, 1, 2, 5, 3, 6, 4, 7)
        x = x.reshape(b, c, d, h, w)
        return x








