import paddle
import numpy as np

# Modified/improved Xception model
# https://blog.csdn.net/yangchengtest/article/details/81705032
# class SeparableConv2D(paddle.nn.Layer):
#     def __init__(self, in_channels, out_channels, k=1, s=1, p=0):
#         super(SeparableConv2D, self).__init__()
#
#         self.conv1 = paddle.nn.Conv2D(in_channels, in_channels, k, s, p, groups=in_channels, bias_attr=False)
#         self.pointwise = paddle.nn.Conv2D(in_channels, out_channels, 1, 1, 0, 1, 1, bias_attr=False)
#
#     def forward(self, x):
#         x = self.conv1(x)
#         x = self.pointwise(x)
#         return x


# class SeparableConv2D(paddle.nn.Layer):
#     def __init__(self, in_channels, out_channels, k=1, s=1, p=0):
#         super(SeparableConv2D, self).__init__()
#
#         self.conv1 = paddle.nn.Conv2D(in_channels, in_channels, k, s, p, groups=in_channels, bias_attr=False)
#         self.pointwise = paddle.nn.Conv2D(in_channels, out_channels, 1, 1, 0, 1, 1, bias_attr=False)
#
#     def forward(self, x):
#         x = self.conv1(x)
#         x = self.pointwise(x)
#         return x


# class SepConvBlock(paddle.nn.Layer):
#     def __init__(self, in_channels, out_channels, k=1, s=1, p=0):
#         super(SepConvBlock, self).__init__()
#         self.relu = paddle.nn.ReLU()
#         self.sepconv = SeparableConv2D(in_channels, out_channels, k=k, s=s, p=p)
#         self.bn = paddle.nn.BatchNorm2D(out_channels)
#
#     def forward(self, x):
#         out = self.bn(self.sepconv(self.relu(x)))
#         return out

class SepConvBlock(paddle.nn.Layer):
    """Depthwise-separable convolution block.

    Pipeline: depthwise conv -> BN (-> ReLU) -> 1x1 pointwise conv -> BN
    (-> ReLU). The ReLUs are only present when ``activation=True``.

    Args:
        in_channels: channels of the input feature map.
        out_channels: channels produced by the pointwise conv.
        k: depthwise kernel size.
        s: depthwise stride.
        dilation: depthwise dilation rate; padding is scaled to match so a
            stride-1 block preserves spatial size for odd ``k``.
        activation: when True, apply ReLU after each batch norm.
    """

    def __init__(self, in_channels, out_channels, k=1, s=1, dilation=1, activation=False):
        super(SepConvBlock, self).__init__()
        # Depthwise conv: groups == in_channels gives one filter per channel.
        self.conv1 = paddle.nn.Conv2D(in_channels, in_channels, k, s, (k//2)*dilation, dilation, groups=in_channels, bias_attr=False)
        # Pointwise 1x1 conv mixes channels up/down to out_channels.
        self.pointwise = paddle.nn.Conv2D(in_channels, out_channels, 1, 1, 0, 1, 1, bias_attr=False)
        self.bn1 = paddle.nn.BatchNorm2D(in_channels)
        self.bn2 = paddle.nn.BatchNorm2D(out_channels)
        # None disables the non-linearity after each BN.
        self.activation1 = paddle.nn.ReLU() if activation else None
        self.activation2 = paddle.nn.ReLU() if activation else None

    def forward(self, x):
        out = self.bn1(self.conv1(x))
        if self.activation1 is not None:
            out = self.activation1(out)
        out = self.bn2(self.pointwise(out))
        return out if self.activation2 is None else self.activation2(out)


class Block(paddle.nn.Layer):
    """Middle-flow residual block: three channel- and size-preserving 3x3
    separable convolutions plus an identity skip connection.

    Args:
        channels: number of input (and output) channels.
        activation_fn_in_separable_conv: when True, each SepConvBlock applies
            ReLU after its batch norms. Previously this argument was accepted
            but silently ignored; it is now honored (the default False keeps
            the original activation-free behavior).
    """

    def __init__(self, channels, activation_fn_in_separable_conv=False):
        super(Block, self).__init__()
        act = activation_fn_in_separable_conv
        self.sepconv1 = SepConvBlock(channels, channels, k=3, s=1, activation=act)
        self.sepconv2 = SepConvBlock(channels, channels, k=3, s=1, activation=act)
        self.sepconv3 = SepConvBlock(channels, channels, k=3, s=1, activation=act)

    def forward(self, x):
        # Residual sum with the unmodified input.
        out = self.sepconv3(self.sepconv2(self.sepconv1(x)))
        return out + x


class DownBlock(paddle.nn.Layer):
    """Entry-flow downsampling block.

    Three 3x3 separable convolutions — the last with stride 2, replacing the
    usual max-pool — plus a strided 1x1 convolution on the skip path.

    forward() returns ``(out, mid)``: the downsampled residual sum and the
    pre-downsampling feature map after the first separable conv (usable as a
    decoder skip connection).
    """

    def __init__(self, in_channels, out_channels, activation_fn_in_separable_conv=False):
        super(DownBlock, self).__init__()
        self.activation_fn_in_separable_conv = activation_fn_in_separable_conv
        if self.activation_fn_in_separable_conv:
            # ReLU is applied inside each separable-conv block.
            self.sepconv1 = SepConvBlock(in_channels, out_channels, k=3, s=1, activation=True)
            self.sepconv2 = SepConvBlock(out_channels, out_channels, k=3, s=1, activation=True)
            # Stride-2 separable conv takes the place of the pooling layer.
            self.sepconv3 = SepConvBlock(out_channels, out_channels, k=3, s=2, activation=True)
        else:
            self.sepconv1 = SepConvBlock(in_channels, out_channels, k=3, s=1)
            self.sepconv2 = SepConvBlock(out_channels, out_channels, k=3, s=1)
            # Stride-2 separable conv takes the place of the pooling layer.
            self.sepconv3 = SepConvBlock(out_channels, out_channels, k=3, s=2)
            # Standalone ReLUs between the blocks instead.
            self.relu1 = paddle.nn.ReLU()
            self.relu2 = paddle.nn.ReLU()
            self.relu3 = paddle.nn.ReLU()
        # 1x1 strided conv matches channels and resolution for the residual sum.
        self.skip = paddle.nn.Conv2D(in_channels, out_channels, 1, stride=2)

    def forward(self, x):
        shortcut = self.skip(x)
        if self.activation_fn_in_separable_conv:
            mid = self.sepconv1(x)
            out = self.sepconv3(self.sepconv2(mid))
        else:
            mid = self.relu1(self.sepconv1(x))
            out = self.relu3(self.sepconv3(self.relu2(self.sepconv2(mid))))
        return out + shortcut, mid


# class Xception(paddle.nn.Layer):
#
#     def __init__(self, num_classes=10):
#         super(Xception, self).__init__()
#
#         self.conv1 = paddle.nn.Conv2D(3, 32, 3, 2, 0, bias_attr=False)
#         self.bn1 = paddle.nn.BatchNorm2D(32)
#         # self.relu1 = paddle.nn.ReLU()
#
#         self.conv2 = paddle.nn.Conv2D(32, 64, 3, padding=1, bias_attr=False)
#         self.bn2 = paddle.nn.BatchNorm2D(64)
#
#         self.block1 = DownBlock(64, 128)
#         self.block2 = DownBlock(128, 256)
#         self.block3 = DownBlock(256, 728)
#
#         self.mid_flow_block_list = []
#         for i in range(16):
#             self.mid_flow_block_list.append(Block(728))
#
#         self.sepconv12 = SepConvBlock(728, 728, k=3, s=1, p=1)
#         self.sepconv13 = SepConvBlock(728, 1024, k=3, s=1, p=1)
#         self.sepconv14 = SepConvBlock(1024, 1024, k=3, s=2, p=1)
#         # self.pool14 = paddle.nn.MaxPool2D(3, stride=2, padding=1)
#         self.skip15 = paddle.nn.Conv2D(728, 1024, 1, stride=2)
#
#         self.conv16 = SeparableConv2D(1024, 1536, 3, 1, 1)
#         self.bn16 = paddle.nn.BatchNorm2D(1536)
#         self.relu16 = paddle.nn.ReLU()
#
#         self.conv17 = SeparableConv2D(1536, 1536, 3, 1, 1)
#         self.bn17 = paddle.nn.BatchNorm2D(1536)
#         self.relu17 = paddle.nn.ReLU()
#
#         # do relu here
#         self.conv18 = SeparableConv2D(1536, 2048, 3, 1, 1)
#         self.bn18 = paddle.nn.BatchNorm2D(2048)
#         self.relu18 = paddle.nn.ReLU()
#
#         self.fc = paddle.nn.Linear(2048, num_classes)
#
#     def forward(self, x):
#         # Entry flow
#         # x = self.relu1(self.bn1(self.conv1(x)))
#         x = self.bn1(self.conv1(x))
#
#         x = self.bn2(self.conv2(x))
#
#         x, _ = self.block1(x)
#         x, entry_flow_skip = self.block2(x)
#         x, _ = self.block3(x)
#         # Middle flow, repeat 16 times
#         for block in self.mid_flow_block_list:
#             x = block(x)
#         # Exit flow
#         # out = self.pool14(self.sepconv13(self.sepconv12(x))) # pool改为sepconv
#         out = self.sepconv14(self.sepconv13(self.sepconv12(x)))
#         skip = self.skip15(x)
#         x = out + skip
#
#         x = self.relu16(self.bn16(self.conv16(x)))
#         # 在这里需要新加一个sepconv，输入输出都是1536
#         x = self.relu17(self.bn17(self.conv17(x)))
#         x = self.relu18(self.bn18(self.conv18(x)))
#
#         x = paddle.nn.AdaptiveAvgPool2D((1, 1))(x)
#         # x = x.view(x.size(0), -1)
#         # x = paddle.reshape(x, shape=[1, -1])
#         # x = self.fc(x)
#
#         return x, entry_flow_skip

class Xception(paddle.nn.Layer):
    """Modified Xception backbone: the exit-flow downsampling is removed and
    replaced by dilated separable convolutions, enlarging the receptive field
    while keeping spatial resolution (DeepLab-style backbone modification).

    forward() returns ``(features, entry_flow_skip)``: globally average-pooled
    exit-flow features of shape (N, 2048, 1, 1), and the block2
    pre-downsampling feature map for use as a decoder skip connection.

    Args:
        num_classes: size of the (currently unused) classifier head.
    """

    # Number of middle-flow residual blocks, per the Xception paper.
    NUM_MID_BLOCKS = 16

    def __init__(self, num_classes=10):
        super(Xception, self).__init__()

        # Entry-flow stem (no ReLU after the stem BNs in this variant).
        self.conv1 = paddle.nn.Conv2D(3, 32, 3, 2, 0, bias_attr=False)
        self.bn1 = paddle.nn.BatchNorm2D(32)

        self.conv2 = paddle.nn.Conv2D(32, 64, 3, padding=1, bias_attr=False)
        self.bn2 = paddle.nn.BatchNorm2D(64)

        self.block1 = DownBlock(64, 128)
        self.block2 = DownBlock(128, 256)
        self.block3 = DownBlock(256, 728)

        # Middle flow: 16 identical residual blocks. Assigning a Layer via
        # setattr registers it as a sublayer, so this loop keeps the original
        # attribute names (mid_block1..mid_block16) without 16 unrolled lines.
        # (A plain Python list would NOT register the sublayers, which is why
        # the earlier list-based version was abandoned.)
        for i in range(1, self.NUM_MID_BLOCKS + 1):
            setattr(self, 'mid_block%d' % i, Block(728))

        self.sepconv12 = SepConvBlock(728, 728, k=3, s=1)
        self.sepconv13 = SepConvBlock(728, 1024, k=3, s=1)
        # sepconv14 does not downsample; dilated convs below grow the
        # receptive field instead.
        self.sepconv14 = SepConvBlock(1024, 1024, k=3, s=1)
        # skip15 does not downsample either, matching the main path.
        self.skip15 = paddle.nn.Conv2D(728, 1024, 1, stride=1)

        # Exit-flow tail: dilated separable convs with internal ReLUs.
        self.conv16 = SepConvBlock(1024, 1536, 3, 1, dilation=2, activation=True)
        self.conv17 = SepConvBlock(1536, 1536, 3, 1, dilation=2, activation=True)
        self.conv18 = SepConvBlock(1536, 2048, 3, 1, dilation=2, activation=True)

        # Classifier head; not called by forward() (kept for fine-tuning use).
        self.fc = paddle.nn.Linear(2048, num_classes)
        self.avgpool2d = paddle.nn.AdaptiveAvgPool2D((1, 1))

    def forward(self, x):
        # Entry flow.
        x = self.bn1(self.conv1(x))
        x = self.bn2(self.conv2(x))

        x, _ = self.block1(x)
        x, entry_flow_skip = self.block2(x)
        x, _ = self.block3(x)

        # Middle flow: 16 residual blocks in sequence.
        for i in range(1, self.NUM_MID_BLOCKS + 1):
            x = getattr(self, 'mid_block%d' % i)(x)

        # Exit flow: residual join of the separable-conv path and 1x1 skip.
        out = self.sepconv14(self.sepconv13(self.sepconv12(x)))
        x = out + self.skip15(x)

        x = self.conv16(x)
        x = self.conv17(x)
        x = self.conv18(x)

        x = self.avgpool2d(x)
        return x, entry_flow_skip
def _smoke_test():
    """Run one zero-filled 299x299 image through the model and print the
    pooled feature shape (expected [1, 2048, 1, 1])."""
    model = Xception(10)
    inputs = np.zeros((1, 3, 299, 299))
    outputs, _ = model(paddle.to_tensor(inputs, dtype='float32'))
    print(outputs.shape)


if __name__ == '__main__':
    # Guard so importing this module does not build and run the model.
    _smoke_test()