import mindspore.nn as nn


class ResBlock3DS(nn.Cell):
    """3-layer bottleneck residual block with down-sampling.

    Main path: 1x1 reduce (strided) -> 3x3 -> 1x1 expand to C1 * 4
    channels. A strided 1x1 projection shortcut matches the main path's
    channel count and spatial size.

    input:  C, WxW
    output: C1 * 4, W/SxW/S

    NOTE(review): `W` is accepted for interface compatibility with the
    sibling blocks but is not used inside this cell.
    """

    def __init__(self, C, W, C1, S):
        super().__init__()
        # Bottleneck main path; stride S is applied at the first 1x1 conv.
        main_layers = [
            nn.Conv2d(C, C1, 1, stride=S),
            nn.BatchNorm2d(C1),
            nn.ReLU(),
            nn.Conv2d(C1, C1, 3, stride=1),
            nn.BatchNorm2d(C1),
            nn.ReLU(),
            nn.Conv2d(C1, C1 * 4, 1, stride=1),
            nn.BatchNorm2d(C1 * 4),
        ]
        self.seq = nn.SequentialCell(main_layers)
        # Projection shortcut: strided 1x1 conv + BN aligns shape with seq.
        shortcut_layers = [
            nn.Conv2d(C, C1 * 4, 1, stride=S),
            nn.BatchNorm2d(C1 * 4),
        ]
        self.shortup = nn.SequentialCell(shortcut_layers)
        self.relu = nn.ReLU()

    def construct(self, x):
        """Sum the main path and the projection shortcut, then ReLU."""
        return self.relu(self.seq(x) + self.shortup(x))


class ResBlock3RT(nn.Cell):
    """3-layer bottleneck residual block that retains input shape.

    Main path: 1x1 reduce to C // 4 -> 3x3 -> 1x1 expand back to C,
    added to the identity shortcut.

    input:  C, WxW
    output: C, WxW

    NOTE(review): `W` is accepted for interface compatibility with the
    sibling blocks but is not used inside this cell.
    """

    def __init__(self, C, W):
        super().__init__()
        bottleneck = C // 4  # channel reduction inside the block
        layers = [
            nn.Conv2d(C, bottleneck, 1, stride=1),
            nn.BatchNorm2d(bottleneck),
            nn.ReLU(),
            nn.Conv2d(bottleneck, bottleneck, 3, stride=1),
            nn.BatchNorm2d(bottleneck),
            nn.ReLU(),
            nn.Conv2d(bottleneck, C, 1, stride=1),
            nn.BatchNorm2d(C),
        ]
        self.seq = nn.SequentialCell(layers)
        self.relu = nn.ReLU()

    def construct(self, x):
        """Add the identity shortcut to the main path, then ReLU."""
        residual = self.seq(x)
        return self.relu(x + residual)


'''
2-layer residual block that retains spatial size
input:  C, WxW
output: C, WxW
'''


class ResBlock2RT(nn.Cell):
    """2-layer residual block that retains input shape.

    Two 3x3 conv + BN stages (ReLU between them) form the main path,
    which is added to the identity shortcut before the final ReLU.

    input:  in_channel, WxW
    output: in_channel, WxW
    """

    def __init__(self, in_channel):
        super().__init__()
        layers = [
            nn.Conv2d(in_channel, in_channel, kernel_size=3, stride=1),
            nn.BatchNorm2d(in_channel),
            nn.ReLU(),
            nn.Conv2d(in_channel, in_channel, kernel_size=3, stride=1),
            nn.BatchNorm2d(in_channel),
        ]
        self.seq = nn.SequentialCell(layers)
        self.relu = nn.ReLU()

    def construct(self, x):
        """Add the identity shortcut to the main path, then ReLU."""
        out = x + self.seq(x)
        return self.relu(out)


'''
2-layer residual block with down-sampling
input:  C0, WxW
output: C1, W/SxW/S
'''


class ResBlock2DS(nn.Cell):
    """2-layer residual block with down-sampling.

    Two 3x3 conv + BN stages form the main path (the first one strided);
    a strided 1x1 projection shortcut matches channels and spatial size.

    input:  in_channel, WxW
    output: out_channel, W/stride x W/stride
    """

    def __init__(self, in_channel, out_channel, stride):
        super().__init__()
        # Main path; the first conv carries both the channel change
        # and the spatial down-sampling.
        main_layers = [
            nn.Conv2d(in_channel, out_channel, 3, stride=stride),
            nn.BatchNorm2d(out_channel),
            nn.ReLU(),
            nn.Conv2d(out_channel, out_channel, 3, stride=1),
            nn.BatchNorm2d(out_channel),
        ]
        self.seq = nn.SequentialCell(main_layers)
        # Projection shortcut: strided 1x1 conv + BN aligns shape with seq.
        shortcut_layers = [
            nn.Conv2d(in_channel, out_channel, 1, stride=stride),
            nn.BatchNorm2d(out_channel),
        ]
        self.shortup = nn.SequentialCell(shortcut_layers)
        self.relu = nn.ReLU()

    def construct(self, x):
        """Sum the main path and the projection shortcut, then ReLU."""
        return self.relu(self.seq(x) + self.shortup(x))
