import torch
import torch.nn as nn
from torchvision.ops import DeformConv2d
import torch.nn.functional as F

class ResidualBlock(nn.Module):
    """Single-conv residual block: Conv->BN plus a shortcut, then ReLU.

    The shortcut is a 1x1 Conv+BN projection when channel counts differ,
    with an extra AvgPool2d when stride > 1, so it matches the main
    branch's output shape.

    Args:
        in_channels:  number of input channels.
        out_channels: number of output channels.
        kernel_size:  conv kernel size (odd sizes keep "same" padding).
        stride:       spatial stride of the main conv.
        dilation:     dilation of the main conv.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1):
        super(ResidualBlock, self).__init__()
        self.kernel_size = kernel_size
        self.stride = stride
        # "same" padding for odd kernels; must scale with dilation or the
        # conv output shrinks and the residual add fails.
        self.padding = (kernel_size - 1) * dilation // 2
        self.dilation = dilation

        # Fix: conv1 previously used padding=(kernel_size-1)//2, ignoring
        # dilation — with dilation > 1 the main branch and the shortcut had
        # different spatial sizes and forward() raised. Use the
        # dilation-aware padding computed above (identical when dilation=1).
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=self.padding,
                               dilation=dilation, bias=False)
        self.norm1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)

        if in_channels != out_channels:
            if stride == 1:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False),
                    nn.BatchNorm2d(out_channels),
                )
            else:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False),
                    nn.BatchNorm2d(out_channels),
                    nn.AvgPool2d(kernel_size=2, stride=stride)
                )
        else:
            if stride == 1:
                self.shortcut = nn.Sequential()
            else:
                self.shortcut = nn.AvgPool2d(kernel_size=2, stride=stride)

    def forward(self, x):
        """Return relu(norm1(conv1(x)) + shortcut(x))."""
        residual = x
        out = self.conv1(x)
        out = self.norm1(out)
        out += self.shortcut(residual)
        out = self.relu(out)
        return out

class GroupConvBN(nn.Module):
    """(Grouped) Conv2d, optionally followed by BatchNorm and an activation.

    Args:
        inp, oup: input / output channel counts.
        ks:       conv kernel size ("same" padding for odd sizes).
        stride:   conv stride.
        dilation: conv dilation.
        groups:   conv groups.
        bn:       append a BatchNorm2d after the conv when True.
        act:      activation name: 'relu', 'relu6', 'leakyRelu', 'elu',
                  'gelu', 'mish', 'swish', 'hswish' — or None for no
                  activation.
    """

    def __init__(self, inp, oup, ks=3, stride=1, dilation=1, groups=1,
                 bn=True, act=None):
        super(GroupConvBN, self).__init__()
        self.stride = stride
        self.kernel_size = ks
        # "same" padding for odd kernels, accounting for dilation.
        self.padding = (ks - 1) * dilation // 2
        self.dilation = dilation

        self.block = nn.Sequential(
            nn.Conv2d(inp, oup, ks, stride, self.padding, groups=groups, bias=False)
        )
        if bn:
            self.block.add_module('bn', nn.BatchNorm2d(oup))
        if act is not None:
            # Factory dispatch: lambdas defer construction until after the
            # membership check, so unsupported names fail the assert below.
            act_factories = {
                'relu': lambda: nn.ReLU(inplace=True),
                'relu6': lambda: nn.ReLU6(inplace=True),
                'leakyRelu': lambda: nn.LeakyReLU(inplace=True),
                'elu': lambda: nn.ELU(inplace=True),
                'gelu': lambda: nn.GELU(),
                'mish': lambda: nn.Mish(inplace=True),
                'swish': lambda: Swish(1.0),
                'hswish': lambda: nn.Hardswish(inplace=True),
            }
            assert act in act_factories
            self.block.add_module('act', act_factories[act]())

    def forward(self, x):
        return self.block(x)

class Swish(nn.Module):
    """Swish activation with a learnable slope: x * sigmoid(beta * x).

    Args:
        beta: initial value of the learnable slope parameter.
    """

    def __init__(self, beta):
        super(Swish, self).__init__()
        # Learnable scalar; float() guards against an int initializer.
        self.beta = nn.Parameter(torch.full((1,), float(beta)))

    def forward(self, x):
        # torch.sigmoid replaces the deprecated F.sigmoid.
        return x * torch.sigmoid(self.beta * x)

class GroupDeformConvBN(nn.Module):
    """Depthwise deformable conv -> BatchNorm -> ReLU.

    Offsets are predicted from the input by a small two-layer conv head
    (1x1 bottleneck to one channel, then 3x3 expansion to the
    2*K*K offset channels DeformConv2d expects for one offset group).

    NOTE(review): the `groups` argument is accepted but not forwarded —
    DeformConv2d is hard-wired to groups=in_channels (depthwise), which
    requires out_channels to be divisible by in_channels. Confirm this is
    intentional before relying on `groups`.
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, groups=1):
        super(GroupDeformConvBN, self).__init__()
        self.stride = stride
        self.kernel_size = kernel_size
        # "same" padding for odd kernels, accounting for dilation.
        self.padding = (kernel_size - 1) * dilation // 2
        self.dilation = dilation

        # Offset head: per-pixel (dy, dx) for each of the K*K taps.
        self.p_conv = nn.Sequential(
            nn.Conv2d(in_channels, out_channels=1, kernel_size=1, stride=1, padding=0, bias=False),
            nn.Conv2d(1, 2*kernel_size*kernel_size, kernel_size=3, stride=1, padding=1, bias=False)
        )

        # Fix: padding was scaled by dilation but `dilation` was never
        # passed to DeformConv2d (so it defaulted to 1), making padding and
        # the effective receptive field disagree for dilation > 1.
        self.conv = DeformConv2d(in_channels, out_channels, kernel_size, stride,
                                 self.padding, dilation=dilation,
                                 groups=in_channels, bias=False)
        self.bn = nn.BatchNorm2d(out_channels)
        self.act = nn.ReLU(inplace=True)

    def forward(self, x):
        offset = self.p_conv(x)
        x = self.conv(x, offset)
        x = self.act(self.bn(x))
        return x
        
class GroupResidualBlock_Up(nn.Module):
    """Grouped residual block: grouped Conv->BN plus a grouped shortcut, then ReLU.

    The shortcut is a grouped 1x1 Conv+BN projection when channel counts
    differ (groups=in_channels, so out_channels must be divisible by
    in_channels), with an AvgPool2d when stride > 1.

    Args:
        in_channels:  number of input channels.
        out_channels: number of output channels.
        kernel_size:  conv kernel size (odd sizes keep "same" padding).
        stride:       spatial stride of the main conv.
        dilation:     dilation of the main conv.
        groups:       conv groups for the main conv; None means depthwise
                      (groups=in_channels).
    """

    def __init__(self, in_channels, out_channels, kernel_size=3, stride=1, dilation=1, groups=None):
        super(GroupResidualBlock_Up, self).__init__()
        self.stride = stride
        self.kernel_size = kernel_size
        # Fix: padding was 2*dilation//2, which is only correct for a 3x3
        # kernel; use the general "same" padding formula instead.
        self.padding = (kernel_size - 1) * dilation // 2
        self.dilation = dilation

        # Fix: conv1 previously hard-coded kernel_size=3, silently ignoring
        # the kernel_size parameter. Defaults preserve the old behavior.
        conv_groups = in_channels if groups is None else groups
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size,
                               stride=stride, padding=self.padding,
                               dilation=dilation, groups=conv_groups, bias=False)

        self.norm1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)

        if in_channels != out_channels:
            if stride == 1:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, groups=in_channels, bias=False),
                    nn.BatchNorm2d(out_channels),
                )
            else:
                self.shortcut = nn.Sequential(
                    nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, groups=in_channels, bias=False),
                    nn.BatchNorm2d(out_channels),
                    nn.AvgPool2d(kernel_size=2, stride=stride)
                )
        else:
            if stride == 1:
                self.shortcut = nn.Sequential()
            else:
                self.shortcut = nn.AvgPool2d(kernel_size=2, stride=stride)

    def forward(self, x):
        """Return relu(norm1(conv1(x)) + shortcut(x))."""
        residual = x
        out = self.conv1(x)
        out = self.norm1(out)
        out += self.shortcut(residual)
        out = self.relu(out)
        return out

class GroupResidualBlock_Down(nn.Module):
    """Grouped residual block with a fixed 3x3 conv (groups=out_channels).

    The identity path projects channels with a grouped 1x1 Conv+BN when
    channel counts differ, and downsamples with AvgPool2d when stride > 1.
    Note: groups=out_channels requires in_channels to be divisible by
    out_channels.

    Args:
        in_channels:  number of input channels.
        out_channels: number of output channels.
        stride:       spatial stride of the main conv.
    """

    def __init__(self, in_channels, out_channels, stride=1):
        super(GroupResidualBlock_Down, self).__init__()
        self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3,
                               stride=stride, padding=1,
                               groups=out_channels, bias=False)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.shortcut = self._make_shortcut(in_channels, out_channels, stride)

    @staticmethod
    def _make_shortcut(in_channels, out_channels, stride):
        # Identity path: grouped 1x1 projection only when channel counts
        # differ; 2x2 average pooling only when strided.
        layers = []
        if in_channels != out_channels:
            layers.append(nn.Conv2d(in_channels, out_channels, kernel_size=1,
                                    stride=1, groups=out_channels, bias=False))
            layers.append(nn.BatchNorm2d(out_channels))
        if stride != 1:
            layers.append(nn.AvgPool2d(kernel_size=2, stride=stride))
        return nn.Sequential(*layers)

    def forward(self, x):
        """Return relu(bn1(conv1(x)) + shortcut(x))."""
        identity = self.shortcut(x)
        return self.relu(self.bn1(self.conv1(x)) + identity)
