import torch
from torch import nn


class Gate(torch.autograd.Function):
    """Hard binary gate with a straight-through estimator (STE).

    Forward: element-wise threshold — 1.0 where ``x > 0``, else 0.0.
    Backward: the incoming gradient is passed through unchanged
    (identity STE), so the non-differentiable threshold does not block
    gradient flow to the gate's input.
    """

    @staticmethod
    def forward(ctx, x):
        # Idiomatic form of torch.Tensor.float(torch.gt(x, zeros_like(x))):
        # 1.0 where x > 0, else 0.0, as a float tensor.
        return (x > 0).float()

    @staticmethod
    def backward(ctx, g):
        # BUG FIX: forward takes exactly one tensor input, so backward must
        # return exactly one gradient. The original returned
        # ``(grad_input, None)``, which makes autograd raise
        # "function ... returned an incorrect number of gradients
        # (expected 1, got 2)" on any backward pass through this gate.
        return g.clone()


class ABCM(nn.Module):
    """Learnable per-channel binary gating module.

    Holds a trainable mask with one entry per channel. At forward time the
    mask is squashed through a sigmoid and shifted by 0.5, then binarized
    by ``Gate`` (so entry j is 1.0 exactly when sigmoid(mask[j]) > 0.5),
    and each channel of the 4-D input is scaled by its gate value.
    """

    def __init__(self, channel_num):
        super().__init__()
        self.channel_num = channel_num
        # One learnable logit per channel, initialized to 1.0 (gate open).
        self.mask = nn.Parameter(torch.ones(channel_num))

    def forward(self, x):
        # sigmoid(mask) - 0.5 > 0  <=>  mask > 0; Gate hard-thresholds it
        # to a 0/1 vector of length channel_num.
        channel_gate = Gate.apply(torch.sigmoid(self.mask) - 0.5)
        # Multiply channel j of x (dims: i=batch, j=channel, k, h) by
        # channel_gate[j]; einsum also enforces that x is 4-D.
        gated = torch.einsum('ijkh,j->ijkh', x, channel_gate)
        return gated
