import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd.function import Function

class Alpha2Mask(Function):
    """Hard threshold with a straight-through gradient estimator.

    Forward emits a float 0/1 mask (``alpha > thres``); backward passes the
    incoming gradient through to ``alpha`` unchanged and returns no gradient
    for ``thres``.
    """

    @staticmethod
    def forward(ctx, alpha, thres):
        # Binarize: 1.0 where alpha exceeds the threshold, 0.0 elsewhere.
        return torch.gt(alpha, thres).float()

    @staticmethod
    def backward(ctx, grad_output):
        # Straight-through: identity gradient w.r.t. alpha, none for thres.
        return grad_output, None

class PConv2d(nn.Conv2d):
    """Conv2d pruned by a learnable per-weight score ``alpha``.

    While pruning is enabled, each forward pass derives a binary mask from
    ``alpha`` via ``Alpha2Mask`` (straight-through gradient) and multiplies it
    into the weight. The most recent mask is cached in the ``mask`` buffer for
    inspection/export.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._enable_prune = True
        # Per-weight pruning score; thresholded against `thres` in forward.
        self.alpha = nn.Parameter(torch.empty(self.weight.shape))
        # Scalar threshold. Zero-initialized so the module is deterministic
        # before a caller sets it (torch.empty left it undefined).
        self.register_buffer('thres', torch.zeros([]))
        # Cache of the last applied mask; zeros until the first pruned forward.
        self.register_buffer('mask', torch.zeros(self.weight.shape))
        nn.init.uniform_(self.alpha)

    def enable_prune(self):
        self._enable_prune = True

    def disable_prune(self):
        self._enable_prune = False

    def forward(self, x):
        w = self.weight
        if self._enable_prune:
            m = Alpha2Mask.apply(self.alpha, self.thres.detach())
            # detach: the cached mask is bookkeeping only — keep it out of
            # autograd. (copy_ already copies, so no extra clone is needed.)
            self.mask.copy_(m.detach())
            w = m * w
        return F.conv2d(x, w, self.bias, self.stride, self.padding,
                        self.dilation, self.groups)

    @classmethod
    def from_dense(cls, mod: nn.Conv2d):
        """Build a PConv2d that shares (not copies) ``mod``'s weight/bias."""
        pconv2d = cls(
            in_channels=mod.in_channels,
            out_channels=mod.out_channels,
            kernel_size=mod.kernel_size,
            bias=mod.bias is not None,
            stride=mod.stride,
            padding=mod.padding,
            dilation=mod.dilation,
            groups=mod.groups,
        )
        # Share the dense layer's Parameters so training updates both views.
        pconv2d.weight = mod.weight
        if mod.bias is not None:
            pconv2d.bias = mod.bias
        return pconv2d.to(mod.weight.device)


class STRConv2d(nn.Conv2d):
    """Conv2d with Soft Threshold Reparameterization (STR) pruning.

    While pruning is enabled, the effective weight is the soft-thresholded
    ``sign(w) * relu(|w| - sigmoid(thres))`` with a learnable threshold
    parameter ``thres`` (initialized to ``sinit``; a very negative ``sinit``
    makes sigmoid(thres) ~ 0, i.e. effectively no pruning at start). The
    binary support of the effective weight is cached in the ``mask`` buffer.
    """

    def __init__(self, *args, sinit=-100, **kwargs):
        super().__init__(*args, **kwargs)
        self._enable_prune = True
        # Learnable threshold (pre-sigmoid); sigmoid keeps it in (0, 1).
        self.thres = nn.Parameter(torch.tensor([sinit], dtype=torch.float32))
        # Ones-initialized ("all weights active") so the buffer is defined
        # before the first pruned forward (torch.empty left it undefined).
        self.register_buffer('mask', torch.ones(self.weight.shape))

    def enable_prune(self):
        self._enable_prune = True

    def disable_prune(self):
        self._enable_prune = False

    def forward(self, x):
        w = self.weight
        if self._enable_prune:
            # STR soft threshold: shrink magnitudes by sigmoid(thres),
            # zeroing any weight whose magnitude falls below it.
            m = torch.relu(torch.abs(w) - torch.sigmoid(self.thres))
            w = torch.sign(w) * m
            # Record the surviving support; comparison already detaches.
            # NOTE: the mask is not refreshed while pruning is disabled.
            self.mask.copy_((m > 0).float(), non_blocking=True)
        return F.conv2d(x, w, self.bias, self.stride, self.padding,
                        self.dilation, self.groups)

    @classmethod
    def from_dense(cls, mod: nn.Conv2d, sinit):
        """Build an STRConv2d that shares (not copies) ``mod``'s weight/bias."""
        new_mod = cls(
            sinit=sinit,
            in_channels=mod.in_channels,
            out_channels=mod.out_channels,
            kernel_size=mod.kernel_size,
            bias=mod.bias is not None,
            stride=mod.stride,
            padding=mod.padding,
            dilation=mod.dilation,
            groups=mod.groups,
        )
        # Share the dense layer's Parameters so training updates both views.
        new_mod.weight = mod.weight
        if mod.bias is not None:
            new_mod.bias = mod.bias
        return new_mod.to(mod.weight.device)