import copy
from torch import nn
import torch
import torch.nn.functional as F
import types


def snip_forward_conv2d(self, x):
    """Conv2d forward pass with the weights gated elementwise by ``self.weight_mask``.

    Bound onto an ``nn.Conv2d`` instance via ``types.MethodType`` so that
    gradients flow through the mask during the SNIP sensitivity pass.
    """
    masked_weight = self.weight * self.weight_mask
    return F.conv2d(
        x,
        masked_weight,
        self.bias,
        self.stride,
        self.padding,
        self.dilation,
        self.groups,
    )

def snip_forward_linear(self, x):
    """Linear forward pass with the weights gated elementwise by ``self.weight_mask``.

    Bound onto an ``nn.Linear`` instance via ``types.MethodType`` so that
    gradients flow through the mask during the SNIP sensitivity pass.
    """
    masked_weight = self.weight * self.weight_mask
    return F.linear(x, masked_weight, self.bias)


def snip(net, keep_ratios, trainloader, devive):
    """Compute SNIP connection-sensitivity keep-masks for a network.

    Runs one forward/backward pass on a single batch from ``trainloader``;
    the gradient magnitude of a multiplicative weight mask is used as the
    saliency score of each connection, and the top ``keep_ratios`` fraction
    of connections (over all Conv2d/Linear layers combined) are kept.

    Args:
        net: the network to score; it is deep-copied, so the caller's
            model is left untouched.
        keep_ratios: fraction of weights to keep, in (0, 1].
        trainloader: iterable yielding ``(inputs, labels)`` batches.
        devive: device to run on. NOTE(review): the name is a typo for
            ``device``; kept in the signature for backward compatibility
            with existing callers.

    Returns:
        List of float 0/1 tensors, one per Conv2d/Linear layer (in
        ``net.modules()`` order), each shaped like that layer's weight.
    """
    device = devive
    inputs, labels = next(iter(trainloader))
    inputs = inputs.to(device)
    labels = labels.to(device)

    # Work on a deep copy so the caller's network is not mutated.
    net = copy.deepcopy(net).to(device)

    for layer in net.modules():
        if isinstance(layer, (nn.Conv2d, nn.Linear)):
            # Multiplicative mask over the weights. The weights are frozen;
            # gradients flow into the mask only, and |dL/d(mask)| is the
            # SNIP connection-sensitivity signal.
            layer.weight_mask = nn.Parameter(torch.ones_like(layer.weight))
            nn.init.xavier_normal_(layer.weight)
            layer.weight.requires_grad = False

            # Override forward so the mask participates in the graph.
            if isinstance(layer, nn.Conv2d):
                layer.forward = types.MethodType(snip_forward_conv2d, layer)
            else:
                layer.forward = types.MethodType(snip_forward_linear, layer)

    net.zero_grad()
    outputs = net(inputs)  # call the module, not net.forward(), so hooks run
    loss = F.nll_loss(outputs, labels)
    loss.backward()

    # Per-layer saliency: absolute gradient of the mask.
    layer_mask_grad = [
        torch.abs(layer.weight_mask.grad)
        for layer in net.modules()
        if isinstance(layer, (nn.Conv2d, nn.Linear))
    ]

    all_scores = torch.cat([torch.flatten(g) for g in layer_mask_grad])
    norm_factor = torch.sum(all_scores)
    all_scores.div_(norm_factor)

    # Guard: a tiny keep_ratios could make topk return an empty tensor and
    # threshold[-1] raise IndexError; always keep at least one connection.
    num_params_to_keep = max(int(len(all_scores) * keep_ratios), 1)
    threshold, _ = torch.topk(all_scores, num_params_to_keep, sorted=True)
    acceptable_score = threshold[-1]

    # Ties at the threshold may keep slightly more than the target count.
    keep_masks = [
        ((g / norm_factor) >= acceptable_score).float() for g in layer_mask_grad
    ]
    print(torch.sum(torch.cat([torch.flatten(m == 1) for m in keep_masks])))

    return keep_masks



