import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd.function import Function
import math
import re
import numpy as np
import logging
from .modules.conv import PConv2d, STRConv2d

def leaky_mask(alpha, mask, leaky=0.01):
    """Straight-through relaxation of a binary pruning mask.

    mask entries: 0 -> pruned, 1 -> reserved. The forward value is the
    "leaky" mask (``leaky`` for pruned entries, ``1 - leaky`` for kept
    ones); the backward gradient flows through ``alpha`` unchanged.
    """
    m = mask.float()
    soft = m * (1 - leaky) + (1 - m) * leaky
    # Forward evaluates to `soft`; gradient w.r.t. alpha is identity.
    return alpha + (soft - alpha).detach()

class Alpha2Mask(Function):
    """Hard threshold with a straight-through gradient.

    Forward emits a {0, 1} float mask (``alpha > thres``); backward
    passes the incoming gradient to ``alpha`` untouched and gives no
    gradient to the threshold.
    """

    @staticmethod
    def forward(ctx, alpha, thres):
        keep = alpha > thres
        return keep.float()

    @staticmethod
    def backward(ctx, grad_output):
        # Identity straight-through estimator; `thres` receives no gradient.
        return grad_output, None

class PMaskController:
    """Drives global magnitude-style pruning over a model's PConv2d layers.

    Conv2d modules whose dotted path matches ``pattern`` are swapped for
    PConv2d in place; a single global threshold over all per-layer alpha
    values realizes the requested prune ratio.
    """

    def __init__(self, model, pattern='.*'):
        """Replace matching Conv2d layers with PConv2d and index them.

        model: root nn.Module, modified in place.
        pattern: regex matched against each child's dotted module path.
        """
        self.model = model
        self.prune_ratio = 0

        self.replace_conv2d(self.model, pattern)
        self.mods = {}
        self.alpha = {}
        self.N = 0  # total number of alpha entries across all PConv2d layers
        for idx, m in enumerate(model.modules()):
            if isinstance(m, PConv2d):
                self.mods[idx] = m
                # flattened view of the layer's alpha parameter
                self.alpha[idx] = m.alpha.reshape([-1])
                self.N += self.alpha[idx].shape[0]
        self.enable_prune()

    def set_prune_ratio(self, v):
        """Set the target fraction of alphas to prune (0 <= v < 1)."""
        assert 0 <= v < 1
        self.prune_ratio = v

    def replace_conv2d(self, mod: nn.Module, pattern='.*', prefix=None):
        """Recursively replace Conv2d children whose dotted name matches."""
        for name, m in mod.named_children():
            fullname = prefix + '.' + name if prefix else name
            if isinstance(m, nn.Conv2d) and re.search(pattern, fullname):
                # Only log selective replacement, not the catch-all default.
                if pattern != '.*':
                    logging.info(f'{fullname}: Conv2d is replaced with PConv2d')
                mod._modules[name] = PConv2d.from_dense(m)
            else:
                self.replace_conv2d(m, pattern=pattern, prefix=fullname)

    def update_thres(self, update_mask=False):
        """Recompute the global threshold from the current prune_ratio.

        Sets every layer's ``thres`` to the value that prunes the smallest
        ``prune_ratio`` fraction of all alphas; with ``update_mask=True``
        each layer's boolean mask is refreshed as well.
        """
        if self.prune_ratio < 0:
            raise ValueError('prune_ratio is negative during updating')
        if self.prune_ratio == 0:
            v = -float('inf')  # nothing pruned: alpha > -inf is all-True
        else:
            all_alpha = torch.cat(list(self.alpha.values()))
            n = math.ceil(self.N * self.prune_ratio)
            # Threshold is the n-th smallest alpha over the whole model.
            v = torch.topk(all_alpha, n, largest=False, sorted=True).values[-1].item()
        for mod in self.mods.values():
            mod.thres.fill_(v)
            if update_mask:
                mod.mask = mod.alpha > v  # boolean keep-mask

    def enable_prune(self):
        """Turn pruning on for every PConv2d in the model."""
        for m in self.model.modules():
            if isinstance(m, PConv2d):
                m.enable_prune()

    def disable_prune(self):
        """Turn pruning off for every PConv2d in the model."""
        for m in self.model.modules():
            if isinstance(m, PConv2d):
                m.disable_prune()

    def get_sparsity(self):
        """Return the fraction of zeroed mask entries over all weights."""
        N = 0   # total weight elements
        NZ = 0  # zeroed (pruned) mask entries
        for name, m in self.model.named_modules():
            if isinstance(m, PConv2d):
                N += m.weight.numel()
                # `== 0` rather than `.abs() == 0`: mask may be a bool
                # tensor after update_thres(update_mask=True), and
                # torch.abs is not implemented for Bool tensors.
                NZ += (m.mask == 0).sum().item()
        return NZ / N


class STRController:
    """Replaces every Conv2d in a model with STRConv2d (soft threshold
    reparameterization) and reports the resulting sparsity."""

    def __init__(self, model, sinit):
        """
        model: root nn.Module, modified in place.
        sinit: initial threshold value forwarded to STRConv2d.from_dense.
        """
        self.model = model
        self.sinit = sinit

        self.replace_conv2d(self.model)
        self.mods = {}
        for idx, m in enumerate(model.modules()):
            if isinstance(m, STRConv2d):
                self.mods[idx] = m
        self.enable_prune()

    def replace_conv2d(self, mod: nn.Module):
        """Recursively swap every Conv2d child for an STRConv2d."""
        # named_children (vs raw _modules) skips None placeholders and
        # matches the traversal style used by PMaskController.
        for name, m in mod.named_children():
            if isinstance(m, nn.Conv2d):
                mod._modules[name] = STRConv2d.from_dense(m, sinit=self.sinit)
            else:
                self.replace_conv2d(m)

    def get_sparsity(self):
        """Return the fraction of zeroed mask entries over all weights."""
        N = 0   # total weight elements
        NZ = 0  # zeroed (pruned) mask entries
        for name, m in self.model.named_modules():
            if isinstance(m, STRConv2d):
                N += m.weight.numel()
                # `== 0` rather than `.abs() == 0`: equivalent for float
                # masks and also works if mask is a Bool tensor
                # (torch.abs is not implemented for Bool).
                NZ += (m.mask == 0).sum().item()
        return NZ / N

    def enable_prune(self):
        """Turn pruning on for every STRConv2d in the model."""
        for m in self.model.modules():
            if isinstance(m, STRConv2d):
                m.enable_prune()


def replace_conv2d(mod :nn.Module, dst_type, pattern='.*', prefix=None):
    """Recursively swap Conv2d submodules of `mod` for `dst_type`.

    A child is replaced when its dotted path matches `pattern`; the
    replacement is built via ``dst_type.from_dense(child)``. Children
    that do not match are descended into instead.
    """
    for child_name, child in mod.named_children():
        path = f'{prefix}.{child_name}' if prefix else child_name
        is_target = isinstance(child, nn.Conv2d) and re.search(pattern, path)
        if not is_target:
            replace_conv2d(child, dst_type, pattern=pattern, prefix=path)
            continue
        if pattern != '.*':
            # Only selective replacement is worth a log line.
            logging.info(f'{path}: Conv2d is replaced with {dst_type.__name__}')
        mod._modules[child_name] = dst_type.from_dense(child)


def replace_prune_module(mod: nn.Module, mapping: dict, pattern='.*', prefix=None, device=None):
    """Recursively replace submodules per `mapping` ({src_type: dst_type}).

    A child whose exact type is a key of `mapping` and whose dotted path
    matches `pattern` is rebuilt via ``dst_type.from_dense(child)`` and
    moved to `device`. Other children are descended into.

    device: target device for replacements; if None it is inferred from
    the first parameter of `mod` (falls back to CPU when `mod` has no
    parameters, instead of raising IndexError).
    """
    if device is None:
        # next() avoids materializing the full parameter list just to
        # read one device, and tolerates parameterless models.
        first_param = next(mod.parameters(), None)
        device = first_param.device if first_param is not None else torch.device('cpu')

    for name, m in mod.named_children():
        fullname = prefix + '.' + name if prefix else name
        if type(m) in mapping and re.search(pattern, fullname):
            dst_type = mapping[type(m)]
            logging.info(f'{fullname}: {type(m).__name__} is replaced with {dst_type.__name__}')
            mod._modules[name] = dst_type.from_dense(m).to(device)
        else:
            replace_prune_module(m, mapping, pattern=pattern, prefix=fullname, device=device)