import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddle.nn.initializer import XavierNormal
from paddle import ParamAttr



class Mish(nn.Layer):
    """Mish activation layer: ``x * tanh(softplus(x))``."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # softplus(x) = ln(1 + e^x); the input is gated by its
        # tanh-squashed softplus.
        gate = paddle.tanh(F.softplus(x))
        return x * gate

def mish(x):
    """Functional form of the Mish activation: ``x * tanh(softplus(x))``."""
    softened = F.softplus(x)
    return x * paddle.tanh(softened)



class ChannelAttention(nn.Layer):
    """Channel-attention module (CBAM style).

    Pools the feature map down to per-channel descriptors ('avg' and/or
    'max'), feeds each through a shared bottleneck MLP, sums the results,
    and rescales the input channels with the sigmoid of that sum.

    Args:
        inplanes (int): number of input channels.
        reduction (int): bottleneck reduction ratio of the shared MLP.
        pool_types (sequence of str): any of ``'avg'``, ``'max'``.
    """

    # NOTE: the default is a tuple, not a list, to avoid the shared
    # mutable-default-argument pitfall.
    def __init__(self, inplanes, reduction=8, pool_types=('avg', 'max')):
        super().__init__()
        self.pool_types = pool_types
        # Shared MLP applied to every pooled (N, C, 1, 1) descriptor.
        self.mlp = nn.Sequential(
            nn.Flatten(),
            nn.Linear(inplanes, inplanes // reduction),
            nn.ReLU(),
            nn.Linear(inplanes // reduction, inplanes)
        )
        self.poolings = self._make_poolings(pool_types)

    def _make_poolings(self, pool_types):
        # Build one global pooling layer per requested pool type.
        poolings = nn.LayerList()
        for pool in pool_types:
            if pool == 'avg':
                poolings.append(nn.AdaptiveAvgPool2D((1, 1)))
            elif pool == 'max':
                poolings.append(nn.AdaptiveMaxPool2D((1, 1)))
            else:
                # Previously unknown names were silently skipped; fail fast
                # instead so typos are caught at construction time.
                raise ValueError(f"unsupported pool type: {pool!r}")
        return poolings

    def forward(self, x):
        # Sum the shared-MLP responses of all pooled descriptors.
        scale = 0
        for pool in self.poolings:
            scale = scale + self.mlp(pool(x))

        # Re-append singleton spatial dims so the (N, C) scale broadcasts
        # over the (N, C, H, W, ...) input.
        shape = list(scale.shape) + [1] * (len(x.shape) - 2)
        scale = F.sigmoid(scale).reshape(shape)
        return x * scale


class SpatialAttention(nn.Layer):
    """Spatial-attention module (CBAM style).

    Stacks the channel-wise max and mean maps, runs a 7x7 conv + BN + ReLU
    over them, and multiplies the input by the resulting sigmoid mask.
    """

    def __init__(self):
        super().__init__()
        # 2-channel (max, mean) input -> 1-channel attention map;
        # 7x7 kernel with padding 3 keeps the spatial size unchanged.
        self.conv = nn.Sequential(
            nn.Conv2D(2, 1, 7, 1, 3, bias_attr=False),
            nn.BatchNorm2D(1),
            nn.ReLU()
        )

    def forward(self, x):
        # Channel-wise statistics, each of shape (N, 1, H, W).
        max_map = paddle.unsqueeze(paddle.max(x, 1), 1)
        mean_map = paddle.unsqueeze(paddle.mean(x, 1), 1)
        mask = self.conv(paddle.concat([max_map, mean_map], axis=1))
        return x * F.sigmoid(mask)


class CBAM(nn.Layer):
    """Convolutional Block Attention Module.

    Applies channel attention first, then (optionally) spatial attention.

    Args:
        inplanes (int): number of input channels.
        reduction (int): bottleneck ratio for the channel-attention MLP.
        pool_types (sequence of str): pooling kinds for channel attention.
        use_spatial (bool): whether to apply spatial attention after
            channel attention.
    """

    # NOTE: tuple default avoids the shared mutable-default-argument pitfall.
    def __init__(self, inplanes, reduction=16, pool_types=('avg', 'max'),
                 use_spatial=True):
        super().__init__()
        self.channel_att = ChannelAttention(inplanes, reduction, pool_types)
        self.spatial_att = SpatialAttention() if use_spatial else None

    def forward(self, x):
        out = self.channel_att(x)
        if self.spatial_att is not None:
            out = self.spatial_att(out)
        return out



