import torch.nn.functional as F
import torch
import torch.nn as nn

class ConvNorm(nn.Module):
    """2-D convolution optionally followed by BatchNorm2d.

    Args:
        in_planes: number of input channels.
        out_planes: number of output channels.
        kernel_size, stride, padding, dilation, groups: forwarded positionally
            to ``nn.Conv2d``.
        with_bn: if True, append a ``BatchNorm2d`` after the convolution.
        norm_type: accepted for API compatibility; only 'BN' is implemented
            (the argument is currently ignored).
    """

    def __init__(self,
                 in_planes,
                 out_planes,
                 kernel_size=1,
                 stride=1,
                 padding=0,
                 dilation=1,
                 groups=1,
                 with_bn=True,
                 norm_type='BN'):
        super().__init__()
        self.with_bn = with_bn
        # A conv bias is redundant when BatchNorm follows (BN subtracts the
        # per-channel mean), so disable it in that case to save parameters.
        # NOTE: checkpoints saved with the old layout (conv.bias present) can
        # still be loaded with load_state_dict(..., strict=False).
        self.conv = nn.Conv2d(in_planes, out_planes, kernel_size, stride,
                              padding, dilation, groups, bias=not with_bn)
        if self.with_bn:
            self.norm = nn.BatchNorm2d(out_planes)
            # Explicit identity init for the affine BN parameters.
            nn.init.constant_(self.norm.weight, 1)
            nn.init.constant_(self.norm.bias, 0)

    def forward(self, x):
        """Apply conv (and BN when enabled) to ``x`` of shape (B, C, H, W)."""
        x = self.conv(x)
        if self.with_bn:
            x = self.norm(x)
        return x


class StarBlock(nn.Module):
    """StarNet-style residual block.

    Pipeline: depthwise 7x7 conv -> element-wise "star" interaction
    ``act(f1(x)) * f2(x)`` -> 1x1 projection back to ``dim`` channels ->
    second depthwise 7x7 conv -> residual add.

    Args:
        dim: channel count of the input/output feature map.
        mlp_ratio: expansion factor for the two 1x1 branches.
        drop_path: dropout probability applied to the residual branch
            (0 disables it).
            NOTE(review): implemented with ``nn.Dropout`` (element-wise), not
            stochastic depth — confirm this is intended.
        norm_type: accepted for API compatibility; unused here.
    """

    def __init__(self,
                 dim,
                 mlp_ratio=4.0,
                 drop_path=0.,
                 norm_type='BN'):
        super().__init__()
        hidden = int(mlp_ratio * dim)
        pad = (7 - 1) // 2
        self.dwconv = ConvNorm(dim, dim, 7, 1, pad, groups=dim, with_bn=True)
        self.f1 = ConvNorm(dim, hidden, 1, with_bn=False)
        self.f2 = ConvNorm(dim, hidden, 1, with_bn=False)
        self.g = ConvNorm(hidden, dim, 1, with_bn=True)
        self.dwconv2 = ConvNorm(dim, dim, 7, 1, pad, groups=dim, with_bn=False)
        self.act = nn.ReLU6()
        if drop_path > 0.:
            self.drop_path = nn.Dropout(drop_path)
        else:
            self.drop_path = nn.Identity()

    def forward(self, x):
        shortcut = x
        x = self.dwconv(x)
        gate, value = self.f1(x), self.f2(x)
        # "Star" operation: gated element-wise product of the two branches.
        x = self.g(self.act(gate) * value)
        x = self.dwconv2(x)
        return shortcut + self.drop_path(x)


class FFEM(nn.Module):
    """Feature-enhancement residual block.

    Combines the StarNet "star" interaction (``act(f1(x)) * f2(x)``) with a
    squeeze-and-excitation (SE) channel gate, followed by two pointwise convs,
    a depthwise 7x7 conv, and a residual add.

    Args:
        dim: channel count of the input/output feature map.
        mlp_ratio: expansion factor for the two 1x1 "star" branches.
        drop_path: dropout probability applied to the residual branch
            (0 disables it).
        reduction: channel reduction ratio inside the SE bottleneck.
        norm_type: accepted for API compatibility; unused here.
    """

    def __init__(self,
                 dim,
                 mlp_ratio=4.0,
                 drop_path=0.,
                 reduction=4,
                 norm_type='BN'):
        super().__init__()
        hidden = int(mlp_ratio * dim)
        pad = (7 - 1) // 2
        self.dwconv1 = ConvNorm(dim, dim, 7, 1, pad, groups=dim, with_bn=True)
        self.f1 = ConvNorm(dim, hidden, 1, with_bn=False)
        self.f2 = ConvNorm(dim, hidden, 1, with_bn=False)

        # Squeeze-and-excitation branch: global average pool + bottleneck MLP.
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.f_se = nn.Sequential(
            nn.Linear(dim, dim // reduction, bias=False),
            nn.ReLU(inplace=True),
            nn.Linear(dim // reduction, dim, bias=False),
            nn.Sigmoid()
        )

        self.pwconv_star = ConvNorm(hidden, dim, 1, with_bn=True)
        self.pwconv_se = ConvNorm(dim, dim, 1, with_bn=True)
        self.dwconv2 = ConvNorm(dim, dim, 7, 1, pad, groups=dim, with_bn=False)
        self.act = nn.ReLU6()
        if drop_path > 0.:
            self.drop_path = nn.Dropout(drop_path)
        else:
            self.drop_path = nn.Identity()

    def forward(self, x):
        batch, channels = x.size(0), x.size(1)
        shortcut = x

        x = self.dwconv1(x)
        gate, value = self.f1(x), self.f2(x)
        # Squeeze (B,C,H,W) -> (B,C), then excite to per-channel gates (B,C,1,1).
        scale = self.avg_pool(x).view(batch, channels)
        scale = self.f_se(scale).view(batch, channels, 1, 1)

        # "Star" interaction, projected back to `dim` channels.
        x = self.pwconv_star(self.act(gate) * value)
        # Apply the SE channel gate, then refine.
        x = self.pwconv_se(x * scale)
        x = self.dwconv2(x)
        return shortcut + self.drop_path(x)

if __name__ == '__main__':
    # Smoke test: FFEM is residual, so it preserves the (B, C, H, W) shape.
    x = torch.rand(1, 192, 56, 56)
    model = FFEM(dim=192)
    with torch.no_grad():  # inference demo only; no gradients needed
        output = model(x)
    print(output.shape)  # torch.Size([1, 192, 56, 56])