'''
 * @Author: Benjay·Shaw
 * @Date: 2024-10-31 17:07:50
 * @LastEditors: Benjay·Shaw
 * @LastEditTime: 2024-10-31 17:24:36
 * @Description: 公用模块
'''
import paddle
import warnings
from attention import *
from common_function import *


class Decoder(paddle.nn.Layer):
    """Decoder head: channel attention followed by two pixel-shuffle upsamplings.

    Channel flow: the attention layer preserves ``in_ch``; the first
    PixelShuffle(2) divides channels by 4 (so the first conv expects
    ``in_ch // 4``); two 3x3 convs map to ``classes * 4`` channels; the second
    PixelShuffle(2) yields ``classes`` output channels at 4x spatial size.
    """

    def __init__(self, in_ch, classes, attention_name='SELayer'):
        """
        Args:
            in_ch (int): number of input feature channels. Must be divisible
                by 4 (consumed by the first PixelShuffle).
            classes (int): number of output (class) channels.
            attention_name (str): attention module to use, 'SELayer' or 'SKConv'.

        Raises:
            ValueError: if ``attention_name`` is not a supported option.
        """
        super(Decoder, self).__init__()
        num_classes = classes
        # NOTE: despite its name, ``self.attention_name`` holds the attention
        # *layer* itself (kept for backward compatibility with callers/ckpts).
        if attention_name == 'SELayer':
            self.attention_name = SELayer(in_ch)
        elif attention_name == 'SKConv':
            self.attention_name = SKConv(in_ch, in_ch)
        else:
            # Previously an unknown name silently left the attribute unset and
            # only failed later inside forward(); fail fast instead.
            raise ValueError(
                f"Unsupported attention_name: {attention_name!r} "
                "(expected 'SELayer' or 'SKConv')")
        self.conv3_1 = paddle.nn.Conv2D(in_channels=in_ch // 4,
            out_channels=num_classes * 8, kernel_size=3, padding=1)
        self.conv3_2 = paddle.nn.Conv2D(in_channels=num_classes * 8,
            out_channels=num_classes * 4, kernel_size=3, padding=1)
        self.ps2 = paddle.nn.PixelShuffle(upscale_factor=2)
        self.ps3 = paddle.nn.PixelShuffle(upscale_factor=2)

    def forward(self, x):
        """Attend, upsample 2x, refine with two convs, upsample 2x again."""
        x = self.attention_name(x)
        x = self.ps2(x)
        x = self.conv3_1(x)
        x = self.conv3_2(x)
        x = self.ps3(x)
        return x


class Upsample(paddle.nn.Layer):
    """Two 3x3 conv+ReLU stages (in_ch -> 2*in_ch -> 4*in_ch) then PixelShuffle.

    The pixel-shuffle factor depends on the backbone: 2 for 'se_resnet50'
    (output channels = in_ch), 4 for 'sk_resnet50' (output channels = in_ch // 4).
    """

    def __init__(self, in_ch, net_name='se_resnet50'):
        """
        Args:
            in_ch (int): number of input feature channels.
            net_name (str): backbone selector, 'se_resnet50' or 'sk_resnet50'.

        Raises:
            ValueError: if ``net_name`` is not a supported option.
        """
        super(Upsample, self).__init__()
        self.conv1_1 = paddle.nn.Sequential(paddle.nn.Conv2D(in_channels=
            in_ch, out_channels=in_ch * 2, kernel_size=3, padding=1),
            paddle.nn.ReLU())
        self.conv1_2 = paddle.nn.Sequential(paddle.nn.Conv2D(in_channels=
            in_ch * 2, out_channels=in_ch * 4, kernel_size=3, padding=1),
            paddle.nn.ReLU())
        if net_name == 'se_resnet50':
            self.ps = paddle.nn.PixelShuffle(upscale_factor=2)
        elif net_name == 'sk_resnet50':
            self.ps = paddle.nn.PixelShuffle(upscale_factor=4)
        else:
            # Previously an unknown name silently left ``self.ps`` unset and
            # only failed later inside forward(); fail fast instead.
            raise ValueError(
                f"Unsupported net_name: {net_name!r} "
                "(expected 'se_resnet50' or 'sk_resnet50')")

    def forward(self, x):
        """Expand channels 4x via convs, then trade channels for resolution."""
        x = self.conv1_1(x)
        x = self.conv1_2(x)
        x = self.ps(x)
        return x


class CosineS(paddle.nn.Layer):
    """Weight two feature maps by their mutual channel-wise cosine similarity.

    Produces ``concat([x2 * sim, x1 * sim])`` along the channel axis, where
    ``sim`` is the per-position cosine similarity between ``x1`` and ``x2``.
    """

    def __init__(self):
        super(CosineS, self).__init__()
        # Similarity is taken over the channel dimension (axis=1).
        self.cosines = paddle.nn.CosineSimilarity(axis=1)

    def forward(self, x1, x2):
        """Return the similarity-weighted concatenation of x2 and x1."""
        sim_map = paddle.unsqueeze(x=self.cosines(x1, x2), axis=1)
        weighted_x2 = x2 * sim_map.expand_as(y=x2)
        weighted_x1 = x1 * sim_map.expand_as(y=x1)
        return paddle.concat(x=[weighted_x2, weighted_x1], axis=1)


class LocallyAdap(paddle.nn.Layer):
    """Locally adaptive fusion of two feature maps.

    Fuses ``x1``/``x2`` via cosine-similarity weighting (CosineS doubles the
    channel count), reduces channels back down with a conv stack
    (2*in_ch -> in_ch -> in_ch // in_ch_ratio), then applies channel attention.
    """

    def __init__(self, in_ch, in_ch_ratio, attention_name='SELayer'):
        """
        Args:
            in_ch (int): channels of each input feature map.
            in_ch_ratio (int): channel reduction ratio for the output.
            attention_name (str): attention module to use, 'SELayer' or 'SKConv'.

        Raises:
            ValueError: if ``attention_name`` is not a supported option.
        """
        super(LocallyAdap, self).__init__()
        self.similarity = CosineS()
        self.conv = paddle.nn.Sequential(paddle.nn.Conv2D(in_channels=in_ch *
            2, out_channels=in_ch, kernel_size=3, padding=1), paddle.nn.
            BatchNorm2D(num_features=in_ch), paddle.nn.ReLU(), paddle.nn.
            Conv2D(in_channels=in_ch, out_channels=in_ch // in_ch_ratio,
            kernel_size=3, padding=1), paddle.nn.BatchNorm2D(num_features=
            in_ch // in_ch_ratio), paddle.nn.ReLU())
        self.conv.apply(weights_init_kaiming)
        # NOTE: despite its name, ``self.attention_name`` holds the attention
        # *layer* itself (kept for backward compatibility with callers/ckpts).
        if attention_name == 'SELayer':
            self.attention_name = SELayer(in_ch // in_ch_ratio)
        elif attention_name == 'SKConv':
            self.attention_name = SKConv(in_ch // in_ch_ratio, in_ch //
                in_ch_ratio)
        else:
            # Previously an unknown name silently left the attribute unset and
            # only failed later inside forward(); fail fast instead.
            raise ValueError(
                f"Unsupported attention_name: {attention_name!r} "
                "(expected 'SELayer' or 'SKConv')")

    def forward(self, x1, x2):
        """Fuse x1 and x2; returns a tensor with in_ch // in_ch_ratio channels."""
        # (removed unused ``batch_size`` local from the original)
        f = self.similarity(x1, x2)
        f = self.conv(f)
        y = self.attention_name(f)
        return y


class LocallyAdap2(paddle.nn.Layer):
    """Locally adaptive fusion with a fixed SE attention and fixed 2x reduction.

    Same pattern as :class:`LocallyAdap` but hard-wired: the conv stack maps
    2*in_ch -> in_ch -> in_ch // 2 and the attention is always SELayer.
    """

    def __init__(self, in_ch):
        """
        Args:
            in_ch (int): channels of each input feature map; output has
                ``in_ch // 2`` channels.
        """
        super(LocallyAdap2, self).__init__()
        self.similarity = CosineS()
        self.conv = paddle.nn.Sequential(paddle.nn.Conv2D(in_channels=in_ch *
            2, out_channels=in_ch, kernel_size=3, padding=1), paddle.nn.
            BatchNorm2D(num_features=in_ch), paddle.nn.ReLU(), paddle.nn.
            Conv2D(in_channels=in_ch, out_channels=in_ch // 2, kernel_size=
            3, padding=1), paddle.nn.BatchNorm2D(num_features=in_ch // 2),
            paddle.nn.ReLU())
        self.conv.apply(weights_init_kaiming)
        self.selayer = SELayer(in_ch // 2)

    def forward(self, x1, x2):
        """Fuse x1 and x2; returns a tensor with in_ch // 2 channels."""
        # (removed unused ``batch_size`` local from the original)
        f = self.similarity(x1, x2)
        f = self.conv(f)
        y = self.selayer(f)
        return y


class FixedBatchNorm(paddle.nn.BatchNorm2D):
    """BatchNorm2D that always normalizes with the stored (frozen) statistics.

    Forces ``training=False`` so the layer uses its accumulated mean/variance
    even inside a ``model.train()`` context (common for frozen backbones).
    """

    def forward(self, input):
        # BUG FIX: paddle's BatchNorm2D stores its state as ``_mean`` /
        # ``_variance`` / ``_epsilon``. The original used the PyTorch names
        # (running_mean / running_var / eps), which do not exist on paddle
        # layers and raised AttributeError on the first call — likely an
        # incomplete torch->paddle conversion.
        return paddle.nn.functional.batch_norm(
            x=input,
            running_mean=self._mean,
            running_var=self._variance,
            weight=self.weight,
            bias=self.bias,
            training=False,
            epsilon=self._epsilon)