import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd.function import Function

class P2Conv2d(nn.Conv2d):
    """Conv2d with a learnable soft pruning mask.

    Each weight gets a trainable score ``alpha``; the effective mask is
    ``relu(tanh(alpha - thres))``, so weights whose score falls below the
    buffered threshold ``thres`` are zeroed while the rest are smoothly
    attenuated. The most recently computed mask is cached in the ``mask``
    buffer for inspection/export.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._enable_prune = True
        # Per-weight pruning scores, trained jointly with the weights.
        self.alpha = nn.Parameter(torch.empty(self.weight.shape))
        # Scalar threshold; zero-initialized so the first forward pass uses a
        # well-defined mask (torch.empty leaves arbitrary memory contents).
        self.register_buffer('thres', torch.zeros([]))
        # Cache of the last mask computed in forward().
        self.register_buffer('mask', torch.zeros(self.weight.shape))
        nn.init.uniform_(self.alpha, -4, 4)

    def enable_prune(self):
        """Apply the soft mask during forward passes."""
        self._enable_prune = True

    def disable_prune(self):
        """Behave like a plain dense Conv2d."""
        self._enable_prune = False

    def forward(self, x):
        w = self.weight
        if self._enable_prune:
            # Soft mask in [0, 1): relu zeroes scores below the threshold.
            m = F.relu(torch.tanh(self.alpha - self.thres))
            # copy_ already copies, so no extra clone() is needed.
            self.mask.copy_(m.detach())
            w = m * w
        return F.conv2d(x, w, self.bias, self.stride, self.padding,
                        self.dilation, self.groups)

    @classmethod
    def from_dense(cls, mod: nn.Conv2d):
        """Build a P2Conv2d replicating an existing dense Conv2d.

        The weights (and bias, if present) are copied; the pruning scores
        keep their random initialization.
        """
        new_mod = cls(
            in_channels=mod.in_channels,
            out_channels=mod.out_channels,
            kernel_size=mod.kernel_size,
            bias=mod.bias is not None,
            stride=mod.stride,
            padding=mod.padding,
            dilation=mod.dilation,
            groups=mod.groups,
        )
        # Copy in place: assigning a plain Tensor to an existing Parameter
        # attribute raises TypeError in nn.Module.__setattr__.
        with torch.no_grad():
            new_mod.weight.copy_(mod.weight)
            if mod.bias is not None:
                new_mod.bias.copy_(mod.bias)
        return new_mod.to(mod.weight.device)

class P2Linear(nn.Linear):
    """Linear layer with a learnable soft pruning mask.

    Each weight gets a trainable score ``alpha``; the effective mask is
    ``relu(tanh(alpha - thres))``, so weights whose score falls below the
    buffered threshold ``thres`` are zeroed while the rest are smoothly
    attenuated. The most recently computed mask is cached in the ``mask``
    buffer for inspection/export.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._enable_prune = True
        # Per-weight pruning scores, trained jointly with the weights.
        self.alpha = nn.Parameter(torch.empty(self.weight.shape))
        # Scalar threshold; zero-initialized so the first forward pass uses a
        # well-defined mask (torch.empty leaves arbitrary memory contents).
        self.register_buffer('thres', torch.zeros([]))
        # Cache of the last mask computed in forward().
        self.register_buffer('mask', torch.zeros(self.weight.shape))
        nn.init.uniform_(self.alpha, -4, 4)

    def enable_prune(self):
        """Apply the soft mask during forward passes."""
        self._enable_prune = True

    def disable_prune(self):
        """Behave like a plain dense Linear."""
        self._enable_prune = False

    def forward(self, x):
        w = self.weight
        if self._enable_prune:
            # Soft mask in [0, 1): relu zeroes scores below the threshold.
            m = F.relu(torch.tanh(self.alpha - self.thres))
            # copy_ already copies, so no extra clone() is needed.
            self.mask.copy_(m.detach())
            w = m * w
        return F.linear(x, w, self.bias)

    @classmethod
    def from_dense(cls, mod: nn.Linear):
        """Build a P2Linear replicating an existing dense Linear.

        The weights (and bias, if present) are copied; the pruning scores
        keep their random initialization.
        """
        new_mod = cls(
            in_features=mod.in_features,
            out_features=mod.out_features,
            bias=mod.bias is not None,
        )
        # Copy in place: assigning a plain Tensor to an existing Parameter
        # attribute raises TypeError in nn.Module.__setattr__.
        with torch.no_grad():
            new_mod.weight.copy_(mod.weight)
            if mod.bias is not None:
                new_mod.bias.copy_(mod.bias)
        return new_mod.to(mod.weight.device)


# Lookup table from a dense torch module class to its prunable counterpart
# (instances are typically built via each counterpart's ``from_dense``).
p2mapping = {
    nn.Conv2d: P2Conv2d,
    nn.Linear: P2Linear,
}