# -*- coding: utf-8 -*-
# ===========================================
# @Time    : 2021/9/8 上午11:02
# @Author  : shutao
# @FileName: modules.py
# @remark  : 
# 
# @Software: PyCharm
# Github 　： https://github.com/NameLacker
# ===========================================

import paddle
import paddle.nn as nn


class Conv2D(nn.Layer):
    """Convolution -> optional BatchNorm -> optional ReLU activation."""

    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 stride=1,
                 padding=0,
                 groups=1,
                 bn=True,
                 act=True):
        """
        Convolution --> normalization --> ReLU activation.
        :param in_channels: number of input channels
        :param out_channels: number of output channels
        :param kernel_size: convolution kernel size (int or (kH, kW))
        :param stride: convolution stride
        :param padding: zero padding (int or [pad_h, pad_w]); the original
            default ``0 if True else []`` always evaluated to 0, simplified here
        :param groups: number of groups for grouped/depthwise convolution
        :param bn: whether to apply a BatchNorm layer after the convolution
        :param act: whether to apply a ReLU activation at the end
        """
        super(Conv2D, self).__init__()
        # BUG FIX: the original stored the boolean flag in ``self.bn`` and then
        # overwrote it with the BatchNorm layer object. Since a layer is always
        # truthy, ``if self.bn:`` in forward() applied BatchNorm even when the
        # caller passed bn=False. Keep the flags under distinct names and only
        # build the sub-layers that are actually enabled.
        self.use_bn = bn
        self.act = act

        self.conv = nn.Conv2D(in_channels, out_channels, kernel_size, stride, padding, groups=groups)
        self.bn = nn.BatchNorm(out_channels) if bn else None
        self.relu = nn.ReLU() if act else None

    def forward(self, x):
        """
        Forward pass.
        :param x: input tensor
        :return: output tensor after conv (and optional BN / ReLU)
        """
        net = self.conv(x)
        if self.use_bn:
            net = self.bn(net)
        if self.act:
            net = self.relu(net)
        return net


class DeConv2D(nn.Layer):
    """Upsampling block: Upsample -> BatchNorm -> ReLU.

    NOTE(review): despite the name ("deconvolution"), this block uses
    ``nn.Upsample`` rather than a transposed convolution, so ``in_channels``
    is stored for reference but never consumed by any sub-layer.
    """

    def __init__(self, in_channels, out_channels, stride=2):
        """
        :param in_channels: number of input channels (stored, unused — see class note)
        :param out_channels: number of channels normalized by BatchNorm
        :param stride: spatial upsampling scale factor
        """
        super(DeConv2D, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.stride = stride

        self.deconv = nn.Upsample(scale_factor=stride)
        self.bn = nn.BatchNorm(out_channels)
        self.relu = nn.ReLU()

    def forward(self, x):
        """Upsample the input, then apply BatchNorm and ReLU.

        :param x: input tensor
        :return: activated, normalized, upsampled tensor
        """
        return self.relu(self.bn(self.deconv(x)))


class AggregationModule(nn.Layer):
    """Aggregate context with two parallel factorized large-kernel branches.

    The input is first reduced by a 3x3 conv, then run through two depthwise
    separable strip-convolution branches — (k x 1 then 1 x k) and the mirrored
    order — whose outputs are summed, batch-normalized and ReLU-activated.
    """

    def __init__(self, in_channels, out_channels, kernel_size):
        """
        :param in_channels: channels of the incoming feature map
        :param out_channels: channels produced by every internal convolution
        :param kernel_size: size k of the factorized (k x 1) / (1 x k) kernels
        """
        super(AggregationModule, self).__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.kernel_size = kernel_size

        pad = kernel_size // 2

        # Channel reduction before the strip-convolution branches.
        self.reduce_conv = Conv2D(in_channels, out_channels, 3, 1, 1)

        def _strip_conv(size, padding):
            # Depthwise 1-D convolution with neither BN nor activation.
            return Conv2D(in_channels=out_channels,
                          out_channels=out_channels,
                          kernel_size=size,
                          padding=padding,
                          groups=out_channels,
                          bn=False, act=False)

        # Branch 1: vertical strip then horizontal strip.
        self.t1 = _strip_conv((kernel_size, 1), [pad, 0])
        self.t2 = _strip_conv((1, kernel_size), [0, pad])

        # Branch 2: horizontal strip then vertical strip.
        self.p1 = _strip_conv((1, kernel_size), [0, pad])
        self.p2 = _strip_conv((kernel_size, 1), [pad, 0])

        self.norm = nn.BatchNorm(out_channels)
        self.relu = nn.ReLU()

    def forward(self, x):
        """Fuse both factorized branches, then apply BN + ReLU.

        :param x: input tensor
        :return: aggregated feature map
        """
        reduced = self.reduce_conv(x)
        branch_a = self.t2(self.t1(reduced))
        branch_b = self.p2(self.p1(reduced))
        return self.relu(self.norm(branch_a + branch_b))
