import torch
from torch import nn
from torch.nn import functional as F


class DropBlock(nn.Module):
    """DropBlock regularization (Ghiasi et al., 2018) for 4-D conv activations.

    During training, randomly zeroes contiguous ``block_size x block_size``
    square regions of each channel and rescales the surviving activations so
    the expected magnitude is preserved. During evaluation, or when the drop
    ratio is 0, the input is returned unchanged.

    Parameters:
        block_size: side length of each dropped square region.
        drop_ratio: fraction of units to drop; ``keep_prob = 1 - drop_ratio``.
    """
    def __init__(self, block_size, drop_ratio=0.9):
        super(DropBlock, self).__init__()
        self.block_size = block_size # side length of each dropped square region
        self.keep_prob = 1 - drop_ratio # for computing parameter of Bernoulli distribution

    def forward(self, x):
        """Apply DropBlock to x [N, chn, H, W]; identity when not training."""
        assert x.dim()==4, "[from DropBlock] unexpected dimension of x, need [N, chn, H, W]"
        if self.keep_prob == 1 or not self.training:
            return x # only dropout during training
        chn, feat_h, feat_w = x.shape[1:4] # get feature map size (N, chn, H, W)
        # Bernoulli parameter gamma, corrected for the valid seed region so the
        # effective drop rate approximates (1 - keep_prob) (eq. 1 of the paper).
        gamma = (1 - self.keep_prob) / (self.block_size ** 2)
        gamma *= feat_h / (feat_h - self.block_size + 1)
        gamma *= feat_w / (feat_w - self.block_size + 1)
        # Sample block "seed" positions from the Bernoulli distribution.
        M = torch.bernoulli(torch.ones_like(x) * gamma)
        # Depthwise conv expands each seed into a block_size x block_size patch.
        # Kernel is built directly on x's device/dtype (no allocate-then-move).
        kernel = torch.ones((chn, 1, self.block_size, self.block_size),
                            device=x.device, dtype=x.dtype)
        mapped_x = F.conv2d(M, kernel, padding=self.block_size//2, groups=chn)
        # FIX: for even block_size the padded conv output is (H+1, W+1); crop
        # back so the mask always matches the input spatially.
        mapped_x = mapped_x[..., :feat_h, :feat_w]
        mask = (mapped_x < 1).to(device=x.device, dtype=x.dtype)
        kept = mask.sum()
        # FIX: guard against a fully-dropped mask — the original divided by
        # zero here and produced NaN/inf. (Also removed a leftover debug
        # torch.set_printoptions call that mutated global state per forward.)
        if kept == 0:
            return x * mask
        # Rescale so the expected activation magnitude is unchanged.
        return x * mask * mask.numel() / kept
