'''
 * @Author: Benjay·Shaw
 * @Date: 2024-10-31 17:07:50
 * @LastEditors: Benjay·Shaw
 * @LastEditTime: 2024-10-31 22:18:24
 * @Description: Common shared modules (decoder heads, upsampling, locally-adaptive fusion)
'''
import paddle
import paddle.nn as nn
import math
from utils.attention import *
from utils.common_function import weights_init_kaiming


class Decoder(nn.Layer):
    """Decoder head: channel attention, then two 2x PixelShuffle upsampling stages.

    Args:
        in_ch: number of channels of the input feature map.
        classes: number of output classes; the final output has
            ``classes`` channels after the second PixelShuffle.
        attention_name: which channel-attention block to use,
            ``'SELayer'`` or ``'SKConv'``.

    Raises:
        ValueError: if ``attention_name`` is not one of the supported names.
    """

    def __init__(self, in_ch, classes, attention_name='SELayer'):
        super(Decoder, self).__init__()
        num_classes = classes
        # NOTE: despite its name, this attribute holds the attention *layer*
        # (kept as `attention_name` for backward compatibility with callers).
        if attention_name == 'SELayer':
            self.attention_name = SELayer(in_ch)
        elif attention_name == 'SKConv':
            self.attention_name = SKConv(in_ch, in_ch)
        else:
            # Previously an unknown name left the plain string in place and
            # only failed later inside forward(); fail fast instead.
            raise ValueError('Unsupported attention_name: %r' % attention_name)
        # ps2 upsamples 2x, reducing channels by a factor of 4 -> in_ch // 4.
        self.conv3_1 = nn.Conv2D(in_ch // 4, num_classes * 8, kernel_size=3,
            padding=1)
        self.conv3_2 = nn.Conv2D(num_classes * 8, num_classes * 4,
            kernel_size=3, padding=1)
        self.ps2 = nn.PixelShuffle(2)
        self.ps3 = nn.PixelShuffle(2)

    def forward(self, x):
        """Apply attention -> 2x upsample -> two 3x3 convs -> 2x upsample."""
        x = self.attention_name(x)
        x = self.ps2(x)
        x = self.conv3_1(x)
        x = self.conv3_2(x)
        x = self.ps3(x)
        return x


class Upsample(nn.Layer):
    """Two 3x3 conv+ReLU stages followed by a PixelShuffle upsample.

    Channels grow ``in_ch -> 2*in_ch -> 4*in_ch`` before the shuffle.

    Args:
        in_ch: number of channels of the input feature map.
        net_name: backbone name selecting the upscale factor:
            ``'se_resnet50'`` -> 2x, ``'sk_resnet50'`` -> 4x.

    Raises:
        ValueError: if ``net_name`` is not one of the supported names.
    """

    def __init__(self, in_ch, net_name='se_resnet50'):
        super(Upsample, self).__init__()
        # Paddle's nn.ReLU takes no `inplace` argument (torch leftover removed).
        self.conv1_1 = nn.Sequential(nn.Conv2D(in_ch, in_ch * 2,
            kernel_size=3, padding=1), nn.ReLU())
        self.conv1_2 = nn.Sequential(nn.Conv2D(in_ch * 2, in_ch * 4,
            kernel_size=3, padding=1), nn.ReLU())
        if net_name == 'se_resnet50':
            self.ps = nn.PixelShuffle(2)
        elif net_name == 'sk_resnet50':
            self.ps = nn.PixelShuffle(4)
        else:
            # Previously an unknown name left self.ps undefined until forward().
            raise ValueError('Unsupported net_name: %r' % net_name)

    def forward(self, x):
        """Expand channels with two convs, then rearrange them into space."""
        x = self.conv1_1(x)
        x = self.conv1_2(x)
        x = self.ps(x)
        return x


class CosineS(nn.Layer):
    """Cosine-similarity weighting of two feature maps.

    Computes per-pixel cosine similarity between ``x1`` and ``x2`` along the
    channel axis, scales each input by that similarity map, and concatenates
    the two weighted maps along the channel axis.
    """

    def __init__(self):
        super(CosineS, self).__init__()
        # Paddle uses `axis` (not torch's `dim`) to select the channel axis.
        self.cosines = nn.CosineSimilarity(axis=1)

    def forward(self, x1, x2):
        """Return concat([x2 * sim, x1 * sim], axis=1) where sim = cos(x1, x2)."""
        similarity = self.cosines(x1, x2)
        # Re-insert the channel axis so the map broadcasts over channels.
        similarity = paddle.unsqueeze(x=similarity, axis=1)
        result1 = x2 * similarity.expand_as(y=x2)
        result2 = x1 * similarity.expand_as(y=x1)
        result = paddle.concat(x=[result1, result2], axis=1)
        return result


class LocallyAdap(nn.Layer):
    """Locally adaptive fusion of two feature maps.

    Fuses ``x1`` and ``x2`` via cosine-similarity weighting (see CosineS),
    compresses channels ``2*in_ch -> in_ch -> in_ch // in_ch_ratio`` with two
    conv+BN+ReLU stages, then applies a channel-attention block.

    Args:
        in_ch: number of channels of each input feature map.
        in_ch_ratio: channel reduction factor for the fused output.
        attention_name: ``'SELayer'`` or ``'SKConv'``.

    Raises:
        ValueError: if ``attention_name`` is not one of the supported names.
    """

    def __init__(self, in_ch, in_ch_ratio, attention_name='SELayer'):
        super(LocallyAdap, self).__init__()
        self.similarity = CosineS()
        # Paddle API: Conv2D / BatchNorm2D, ReLU without `inplace`.
        self.conv = nn.Sequential(nn.Conv2D(in_ch * 2, in_ch, 3, padding=1),
            nn.BatchNorm2D(in_ch), nn.ReLU(), nn.Conv2D(in_ch,
            in_ch // in_ch_ratio, 3, padding=1), nn.BatchNorm2D(in_ch //
            in_ch_ratio), nn.ReLU())
        self.conv.apply(weights_init_kaiming)
        # NOTE: despite its name, this attribute holds the attention *layer*.
        if attention_name == 'SELayer':
            self.attention_name = SELayer(in_ch // in_ch_ratio)
        elif attention_name == 'SKConv':
            self.attention_name = SKConv(in_ch // in_ch_ratio, in_ch //
                in_ch_ratio)
        else:
            raise ValueError('Unsupported attention_name: %r' % attention_name)

    def forward(self, x1, x2):
        """Similarity-fuse x1/x2, compress channels, apply attention."""
        f = self.similarity(x1, x2)
        f = self.conv(f)
        y = self.attention_name(f)
        return y


class LocallyAdap2(nn.Layer):
    """Locally adaptive fusion with a fixed 2x channel reduction and SELayer.

    Same pipeline as ``LocallyAdap`` but hard-wired: channels go
    ``2*in_ch -> in_ch -> in_ch // 2`` and the attention block is SELayer.

    Args:
        in_ch: number of channels of each input feature map.
    """

    def __init__(self, in_ch):
        super(LocallyAdap2, self).__init__()
        self.similarity = CosineS()
        # Paddle API: Conv2D / BatchNorm2D, ReLU without `inplace`.
        self.conv = nn.Sequential(nn.Conv2D(in_ch * 2, in_ch, 3, padding=1),
            nn.BatchNorm2D(in_ch), nn.ReLU(), nn.Conv2D(in_ch,
            in_ch // 2, 3, padding=1), nn.BatchNorm2D(in_ch // 2),
            nn.ReLU())
        self.conv.apply(weights_init_kaiming)
        self.selayer = SELayer(in_ch // 2)

    def forward(self, x1, x2):
        """Similarity-fuse x1/x2, compress channels, apply SE attention."""
        f = self.similarity(x1, x2)
        f = self.conv(f)
        y = self.selayer(f)
        return y


class FixedBatchNorm(nn.BatchNorm2D):
    """BatchNorm2D frozen at inference statistics.

    Always normalizes with the stored running mean/variance
    (``training=False``) regardless of the layer's train/eval mode, so the
    statistics are never updated by a forward pass.
    """

    def forward(self, input):
        # Paddle's BatchNorm2D stores its state as `_mean`, `_variance`, and
        # `_epsilon` (the torch names running_mean/running_var/eps do not
        # exist on the Paddle layer).
        return paddle.nn.functional.batch_norm(x=input, running_mean=self.
            _mean, running_var=self._variance, weight=self.weight,
            bias=self.bias, training=False, epsilon=self._epsilon)