from torch import nn


class DownConv(nn.Module):
    """Encoder stage: (optional max-pool) -> two conv+BN+ReLU blocks.

    Each conv uses reflect padding and is immediately followed by
    BatchNorm2d, so the conv bias is redundant (BatchNorm's learned shift
    subsumes it) and is disabled — this also matches the convs in UpConv.

    NOTE(review): checkpoints saved with the previous biased convs will
    need ``load_state_dict(..., strict=False)``.

    Args:
        input_channel: channels of the incoming feature map.
        out_channel: channels produced by both conv layers.
        padding: spatial padding for each conv (reflect mode).
        conv_kernel_size: kernel size of both convs (default 3).
        conv_stride: stride of both convs (default 1).
        pooling_kernel_size: max-pool kernel (and stride) size (default 2).
        need_pooling: if True, downsample by max pooling before the convs;
            the first encoder stage typically passes False.
    """

    def __init__(self, input_channel, out_channel, padding, conv_kernel_size=3, conv_stride=1, pooling_kernel_size=2,
                 need_pooling=True):
        super(DownConv, self).__init__()
        self.need_pooling = need_pooling
        # bias=False: each conv feeds straight into BatchNorm2d (see class
        # docstring); consistent with UpConv's conv layers.
        self.conv1 = nn.Conv2d(in_channels=input_channel, out_channels=out_channel, kernel_size=conv_kernel_size,
                               stride=conv_stride, padding_mode="reflect", padding=padding, bias=False)
        self.BN1 = nn.BatchNorm2d(out_channel)
        self.Relu1 = nn.ReLU()
        self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel, kernel_size=conv_kernel_size,
                               stride=conv_stride, padding_mode="reflect", padding=padding, bias=False)
        self.BN2 = nn.BatchNorm2d(out_channel)
        self.Relu2 = nn.ReLU()
        # A scalar kernel_size is equivalent to the (k, k) tuple form.
        self.max_pooling1 = nn.MaxPool2d(kernel_size=pooling_kernel_size)

    def forward(self, x):
        """Apply optional pooling, then conv->BN->ReLU twice.

        Args:
            x: input tensor — presumably (N, input_channel, H, W); verify
               against caller.

        Returns:
            Tensor with out_channel channels; spatial size halved first
            when need_pooling is True (with padding=1, k=3, stride=1 the
            convs themselves preserve H and W).
        """
        if self.need_pooling:
            x = self.max_pooling1(x)
        x = self.Relu1(self.BN1(self.conv1(x)))
        x = self.Relu2(self.BN2(self.conv2(x)))
        return x


class UpConv(nn.Module):
    """Decoder stage: (optional max-pool) -> two conv+BN+ReLU blocks ->
    optional transposed conv that doubles the spatial size and halves the
    channel count.

    Args:
        input_channel: channels of the incoming feature map (e.g. the
            concatenation of a skip connection with the upsampled path).
        out_channel: channels produced by both conv layers.
        padding: spatial padding for each conv (reflect mode).
        conv_kernel_size: kernel size of both convs (default 3).
        conv_stride: stride of both convs (default 1).
        up_conv_kernel_size: kernel size of the transposed conv (default 2;
            with stride 2 this exactly doubles H and W).
        if_up_conv: if True, finish with the transposed conv (intermediate
            decoder stages); if False, return the conv stack's output as-is.
        need_pooling: if True, max-pool the input first (default False).
        pooling_kernel_size: max-pool kernel (and stride) size (default 2).
    """

    def __init__(self, input_channel, out_channel, padding, conv_kernel_size=3, conv_stride=1, up_conv_kernel_size=2,
                 if_up_conv=True, need_pooling=False, pooling_kernel_size=2):
        super(UpConv, self).__init__()
        self.if_up_conv = if_up_conv
        self.need_pooling = need_pooling
        # NOTE(review): one_conv is never called in forward (its branch was
        # commented out). It is kept registered so existing checkpoints still
        # load with strict=True; remove it once no such checkpoints remain.
        self.one_conv = nn.Conv2d(in_channels=out_channel, out_channels=1, kernel_size=1)
        self.max_pooling1 = nn.MaxPool2d(kernel_size=pooling_kernel_size)

        # bias=False: each conv feeds straight into BatchNorm2d, whose
        # learned shift makes a conv bias redundant.
        self.conv1 = nn.Conv2d(in_channels=input_channel, out_channels=out_channel, kernel_size=conv_kernel_size,
                               stride=conv_stride, padding_mode="reflect", padding=padding, bias=False)
        self.BN1 = nn.BatchNorm2d(out_channel)
        self.Relu1 = nn.ReLU()
        self.conv2 = nn.Conv2d(in_channels=out_channel, out_channels=out_channel, kernel_size=conv_kernel_size,
                               stride=conv_stride, padding_mode="reflect", padding=padding, bias=False)
        self.BN2 = nn.BatchNorm2d(out_channel)
        self.Relu2 = nn.ReLU()
        # Floor division: integer-safe halving (int(x / 2) went through a
        # float and would break on non-int channel counts).
        self.up_conv_trans1 = nn.ConvTranspose2d(in_channels=out_channel, out_channels=out_channel // 2,
                                                 kernel_size=up_conv_kernel_size, stride=2)

    def forward(self, x):
        """Apply optional pooling, conv->BN->ReLU twice, optional up-conv.

        Args:
            x: input tensor — presumably (N, input_channel, H, W); verify
               against caller.

        Returns:
            Tensor with out_channel channels (or out_channel // 2 channels
            and doubled H, W when if_up_conv is True).
        """
        if self.need_pooling:
            x = self.max_pooling1(x)
        x = self.Relu1(self.BN1(self.conv1(x)))
        x = self.Relu2(self.BN2(self.conv2(x)))
        if self.if_up_conv:
            x = self.up_conv_trans1(x)
        return x

