#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created by PyCharm.

@Date    : Tue May 05 2020 
@Time    : 23:39:30
@File    : dyconv.py
@Author  : alpha
"""

import torch
import torch.nn as nn
import torch.nn.functional as F


class OpenDyConv2D(nn.Module):
    """Dynamic 2-D convolution with externally supplied channel selection.

    Holds an over-parameterized kernel bank of ``out_channels * factor``
    filters; each forward pass keeps only ``out_channels`` of them, chosen
    per-sample by the caller via ``selected_channels``.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 groups=1,
                 bias=True,
                 factor=8,
                 with_bn=True):
        super(OpenDyConv2D, self).__init__()
        self.out_channels = out_channels
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.bias = bias
        self.with_bn = with_bn
        # Over-parameterized kernel bank: `factor` times more output
        # channels than any single forward pass returns.
        self.dyconv_train = nn.Conv2d(
            in_channels,
            out_channels * factor,
            kernel_size,
            stride,
            padding,
            dilation,
            groups=groups,
            bias=bias
        )
        if with_bn:
            # BN over the full bank; applied before channel selection.
            self.dybn_train = nn.BatchNorm2d(out_channels * factor)

    def forward(self, x, selected_channels):
        """
        :param x: input tensor, shape like [N, C, H, W]
        :param selected_channels: indices for selection, shape like [N, out_channels],
            select from out_channels * factor kernels
        :return: forwarded result, shape [N, out_channels, H', W']
        """
        batch_n, out_channels = selected_channels.shape
        assert batch_n == x.shape[0]
        assert out_channels == self.out_channels
        if batch_n == 1 and not self.training:
            # Single-sample inference fast path: slice the kernel bank and
            # run a small conv instead of computing all factor*out channels.
            # NOTE(review): BN is skipped on this path, so with_bn=True gives
            # different results than the batched path below.
            # TODO: fuse BN with conv here
            selected_weight = self.dyconv_train.weight[selected_channels[0]]
            selected_bias = self.dyconv_train.bias[selected_channels[0]] if self.bias else None
            x = F.conv2d(x, selected_weight, selected_bias,
                         stride=self.stride,
                         padding=self.padding,
                         dilation=self.dilation,
                         groups=self.groups)
        else:
            # Compute all channels, then gather the per-sample selection.
            x = self.dyconv_train(x)
            if self.with_bn:
                x = self.dybn_train(x)
            # Bug fix: '.rehsape' -> '.reshape' (original raised AttributeError).
            x = x[torch.arange(batch_n).reshape(-1, 1), selected_channels]
        return x


class DyConv2D(nn.Module):
    """Dynamic 2-D convolution with input-dependent channel selection.

    A linear "selector" head scores all ``out_channels * factor`` kernels
    from the globally pooled input; the top ``out_channels`` kernels are
    kept per sample.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 groups=1,
                 bias=True,
                 factor=8,
                 with_bn=True):
        super(DyConv2D, self).__init__()
        self.out_channels = out_channels
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.bias = bias
        # NOTE(review): with_bn is stored but never used in this class
        # (no BatchNorm is created, unlike OpenDyConv2D) — kept for
        # interface compatibility; confirm whether BN was intended here.
        self.with_bn = with_bn
        # Over-parameterized kernel bank: `factor` times more output
        # channels than any single forward pass returns.
        self.dyconv_train = nn.Conv2d(
            in_channels,
            out_channels * factor,
            kernel_size,
            stride,
            padding,
            dilation,
            groups=groups,
            bias=bias
        )
        self.global_pool = nn.AdaptiveAvgPool2d((1, 1))
        # Scores each of the out_channels * factor kernels per sample.
        self.selector = nn.Linear(in_channels, out_channels * factor)

    def forward(self, x):
        """
        :param x: input tensor, shape like [N, C, H, W]
        :return: forwarded result, shape [N, out_channels, H', W']
        """
        batch_n = x.shape[0]
        # Global average pool -> [N, C], then score every kernel.
        pooled_x = self.global_pool(x)[:, :, 0, 0]
        fc = self.selector(pooled_x)
        # Per-sample top-k kernel indices, sorted for stable channel order.
        selected_channels = fc.topk(self.out_channels, dim=1).indices.sort(dim=1).values
        if batch_n == 1 and not self.training:
            # Single-sample inference fast path: slice the kernel bank and
            # run a small conv instead of computing all factor*out channels.
            # TODO: fuse BN with CONV here
            selected_weight = self.dyconv_train.weight[selected_channels[0]]
            selected_bias = self.dyconv_train.bias[selected_channels[0]] if self.bias else None
            x = F.conv2d(x, selected_weight, selected_bias,
                         stride=self.stride,
                         padding=self.padding,
                         dilation=self.dilation,
                         groups=self.groups)
        else:
            # Compute all channels, then gather the per-sample selection.
            x = self.dyconv_train(x)
            # Bug fix: '.rehsape' -> '.reshape' (original raised AttributeError).
            x = x[torch.arange(batch_n).reshape(-1, 1), selected_channels]
        return x


class DyPrunedConv2D(nn.Module):
    """Dynamic 2-D convolution with a learned, input-independent pruning mask.

    A learnable score vector over all ``out_channels * factor`` kernels is
    shared across samples; the top ``out_channels`` kernels are kept. During
    training, uniform noise is added to the (sigmoid) scores to encourage
    exploration of the kernel bank.
    """

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size=3,
                 stride=1,
                 padding=1,
                 dilation=1,
                 groups=1,
                 bias=True,
                 factor=8):
        super(DyPrunedConv2D, self).__init__()
        self.out_channels = out_channels
        self.stride = stride
        self.padding = padding
        self.dilation = dilation
        self.groups = groups
        self.bias = bias
        # Over-parameterized kernel bank: `factor` times more output
        # channels than any single forward pass returns.
        self.dyconv_train = nn.Conv2d(
            in_channels,
            out_channels * factor,
            kernel_size,
            stride,
            padding,
            dilation,
            groups=groups,
            bias=bias
        )
        # One learnable score per kernel in the bank.
        self.selector = nn.Parameter(torch.zeros(out_channels * factor))

    def forward(self, x):
        """
        :param x: input tensor, shape like [N, C, H, W]
        :return: forwarded result, shape [N, out_channels, H', W']
        """
        # Fix: F.sigmoid is deprecated since PyTorch 0.4.1; torch.sigmoid
        # is the numerically identical replacement.
        if self.training:
            # Add uniform noise in [0, 0.2) to the scores during training
            # so near-tied kernels occasionally swap into the selection.
            selector = torch.sigmoid(self.selector) + torch.rand_like(self.selector) * 0.2
        else:
            selector = torch.sigmoid(self.selector)
        selected_channels = selector.topk(self.out_channels).indices.sort().values
        # Expose the last selection for inspection / pruning export.
        self.selected_channels = selected_channels
        if not self.training:
            # Inference: slice the kernel bank once (selection is shared by
            # the whole batch) and run the small conv directly.
            selected_weight = self.dyconv_train.weight[selected_channels]
            selected_bias = self.dyconv_train.bias[selected_channels] if self.bias else None
            x = F.conv2d(x, selected_weight, selected_bias,
                         stride=self.stride,
                         padding=self.padding,
                         dilation=self.dilation,
                         groups=self.groups)
        else:
            # Training: compute all channels, then keep the selected ones.
            x = self.dyconv_train(x)
            x = x[:, selected_channels]
        return x