"""
ResNet模型代码
"""
import math

import numpy as np

import paddle
import paddle.nn as nn
import paddle.nn.functional as F
# NOTE(review): unused and almost certainly an accidental IDE auto-import —
# the `shortcut` name used below is a plain local boolean that shadows this.
# Safe to remove once confirmed nothing else in the project relies on it.
from django.contrib.contenttypes.views import shortcut
from paddle.vision.models.densenet import ConvBNLayer


# Definition of the ResNet model
class ResNet(paddle.nn.Layer):
    def __init__(self, layers=50, class_dim=1):
        """
        Residual network backbone with a linear classifier head.

        :param layers: network depth; one of 50, 101 or 152
                       (docstring previously said 512, which the assert rejects)
        :param class_dim: number of output classes
        """
        super(ResNet, self).__init__()
        self.layers = layers
        supported_layers = [50, 101, 152]
        assert layers in supported_layers, \
            'supported layers are {} but input layer is {}'.format(supported_layers, layers)

        # Number of bottleneck blocks per stage (c2..c5) for each variant.
        if layers == 50:
            # ResNet-50: stages 2-5 contain 3, 4, 6 and 3 residual blocks
            depth = [3, 4, 6, 3]
        elif layers == 101:
            # ResNet-101: stages 2-5 contain 3, 4, 23 and 3 residual blocks
            depth = [3, 4, 23, 3]
        elif layers == 152:
            # ResNet-152: stages 2-5 contain 3, 8, 36 and 3 residual blocks
            depth = [3, 8, 36, 3]

        # Per-stage output channel counts used by the residual blocks.
        num_filters = [64, 128, 256, 512]

        # Stage c1: a 7x7 stride-2 conv + BN + ReLU, then a 3x3 stride-2 max pool.
        self.conv = ConvBNLayer(num_channels=3,
                                num_filters=64,
                                filter_size=7,
                                stride=2,
                                act='relu')
        self.pool2d_max = nn.MaxPool2D(kernel_size=3,
                                       stride=2,
                                       padding=1)

        # Stages c2..c5: stacks of bottleneck residual blocks.
        self.bottleneck_block_list = []
        num_channels = 64
        for block in range(len(depth)):
            # Only the first block of each stage needs a projection shortcut.
            shortcut = False
            for i in range(depth[block]):
                # The first block of c3/c4/c5 downsamples with stride=2;
                # all remaining blocks use stride=1.
                bottleneck_block = self.add_sublayer(
                    'bb_%d_%d' % (block, i),
                    BottleneckBlock(
                        num_channels=num_channels,
                        # BUG FIX: pass the per-stage filter count; the original
                        # passed the entire list [64, 128, 256, 512].
                        num_filters=num_filters[block],
                        stride=2 if i == 0 and block != 0 else 1,
                        shortcut=shortcut
                    )
                )
                num_channels = bottleneck_block._num_channels_out
                self.bottleneck_block_list.append(bottleneck_block)
                shortcut = True

        # Global average pooling over the c5 feature map.
        self.pool2d_avg = paddle.nn.AdaptiveAvgPool2D(output_size=1)

        # stdv bounds the uniform initialization of the FC weights (1/sqrt(fan_in)).
        stdv = 1.0 / math.sqrt(2048 * 1.0)

        # Final classifier. After the backbone and global pooling the features
        # are [B, 2048, 1, 1], so the FC layer's input dimension is 2048.
        self.out = nn.Linear(in_features=2048, out_features=class_dim,
                             weight_attr=paddle.ParamAttr(
                                 initializer=paddle.nn.initializer.Uniform(-stdv, stdv)
                             ))

    def forward(self, inputs):
        """Run the backbone and classifier; returns [B, class_dim] logits."""
        y = self.conv(inputs)
        y = self.pool2d_max(y)
        for bottleneck_block in self.bottleneck_block_list:
            y = bottleneck_block(y)
        y = self.pool2d_avg(y)
        # Flatten [B, 2048, 1, 1] -> [B, 2048] before the linear head.
        y = paddle.reshape(y, [y.shape[0], -1])
        y = self.out(y)
        return y