import torch
import torch.nn as nn
import torch.nn.functional as F

from typing import Tuple, List, Optional, Union

import inspect
from ..tools.valid_tools import isconsistentinstance
from ..bases.base_types import _size_2_t


class ConvolutionBatchnormActivationModule(nn.Module):
    '''
        Conv2d - Batchnorm - Activation module with "same"-style padding.

        The convolution is created with bias=False (the following BatchNorm2d
        has its own bias, making a conv bias redundant) and padding computed as
        (k - 1) // 2 per spatial dimension, which preserves spatial size for
        odd kernel sizes at stride 1.

        Args:
            in_channels: number of input channels.
            out_channels: number of output channels.
            kernel_size: an int or a tuple of two ints.
            stride: an int or a tuple of two ints.
            activation: name of an activation class in torch.nn (e.g. 'ReLU',
                'LeakyReLU'). None or the string 'none' disables the activation.
            **kwargs: extra keyword arguments, routed to either the nn.Conv2d
                constructor or the activation constructor by inspecting their
                signatures. Anything accepted by neither raises ValueError.

        Raises:
            ValueError: bad kernel_size tuple, unknown activation name,
                unroutable kwarg, or an attempt to override 'padding'/'bias'.
            TypeError: kernel_size is neither an int nor a tuple.
    '''

    def __init__(self, in_channels: int, out_channels: int,
                 kernel_size: _size_2_t, stride: _size_2_t,
                 activation: Optional[str] = None, **kwargs) -> None:
        super().__init__()
        if isinstance(activation, str) and activation.lower() == 'none':
            activation = None

        # compute "same" padding per spatial dimension
        # (isinstance against builtin tuple, not typing.Tuple, which is not a
        # valid isinstance target on newer Python versions)
        if isinstance(kernel_size, int):
            pad: Union[int, Tuple[int, int]] = (kernel_size - 1) // 2
        elif isinstance(kernel_size, tuple):
            if len(kernel_size) != 2:
                raise ValueError(f"expected length of tuple 'kernel_size' is 2, but got {len(kernel_size)} instead")
            if all(isinstance(k, int) for k in kernel_size):
                pad = ((kernel_size[0] - 1) // 2, (kernel_size[1] - 1) // 2)
            else:
                raise ValueError("expected 'kernel_size' as a tuple of two integers")
        else:
            raise TypeError("expected 'kernel_size' as an integer or a tuple of two integers")

        if activation is not None and getattr(nn, activation, None) is None:
            raise ValueError(f"invalid activation function '{activation}', "
                             f"NOTE: specified activation function must be in torch.nn, not torch.nn.functional")

        # Separate conv arguments from activation arguments.  Argspecs are
        # looked up once, outside the loop.  A kwarg accepted by neither
        # constructor raises in every case (previously it was silently dropped
        # whenever an activation was set, because the raise was attached to
        # the 'elif activation is not None' branch).
        conv_params = inspect.getfullargspec(nn.Conv2d).args
        act_params = inspect.getfullargspec(getattr(nn, activation)).args if activation is not None else []
        conv_args: dict = {}
        act_args: dict = {}
        for name, value in kwargs.items():
            if name in ('padding', 'bias'):
                # both are fixed by this module (pad computed above, bias=False);
                # letting them through caused a duplicate-keyword TypeError
                raise ValueError(f"argument '{name}' is managed by {type(self).__name__} and cannot be overridden")
            if name in conv_params:
                conv_args[name] = value
            elif name in act_params:
                act_args[name] = value
            else:
                raise ValueError(f"invalid arguments '{name}' passed to CBAM constructor")

        self.conv = nn.Conv2d(in_channels=in_channels,
                              out_channels=out_channels,
                              kernel_size=kernel_size,
                              stride=stride, bias=False,
                              padding=pad, **conv_args)
        self.bn = nn.BatchNorm2d(out_channels)
        if activation is not None:
            self.activation = getattr(nn, activation)(**act_args)
        else:
            self.activation = None

        # weight initialization matched to the chosen nonlinearity
        if activation == 'LeakyReLU':
            # default slope per torch.nn.LeakyReLU docs
            a = act_args.get('negative_slope', 0.01)
            nn.init.kaiming_normal_(self.conv.weight, a=a, mode='fan_out', nonlinearity='leaky_relu')
        elif activation == 'ReLU':
            nn.init.kaiming_normal_(self.conv.weight, mode='fan_out', nonlinearity='relu')
        elif activation is not None:
            try:
                # calculate_gain() takes a lowercase nonlinearity name and a
                # single optional 'param' — passing the class name ('Tanh')
                # always raised ValueError, and forwarding **act_args could
                # raise an uncaught TypeError
                gain = nn.init.calculate_gain(activation.lower())
            except ValueError:
                gain = 1.0  # nonlinearity unknown to calculate_gain: plain Xavier
            nn.init.xavier_uniform_(self.conv.weight, gain=gain)
        else:
            nn.init.xavier_uniform_(self.conv.weight)
        nn.init.constant_(self.bn.weight, 1)
        nn.init.constant_(self.bn.bias, 0)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        '''Apply conv -> batchnorm -> (optional) activation to x.'''
        x = self.conv(x)
        x = self.bn(x)
        if self.activation is not None:
            x = self.activation(x)
        return x
