import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers, Model

#（1）构建一个dense单元
def DenseLayer(x, growth_rate, dropout_rate=0.2):
    
    # BN+激活+1*1卷积降维
    x = layers.BatchNormalization()(x)
    x = layers.Activation('relu')(x)
    x = layers.Conv2D(filters = growth_rate*4,  # 降低特征图数量
                      kernel_size = (1,1),
                      strides = 1,
                      padding = 'same')(x)
    
    # BN+激活+3*3卷积
    x = layers.BatchNormalization()(x)
    x = layers.Activation('relu')(x)
    x = layers.Conv2D(filters = growth_rate,
                      kernel_size = (3,3),
                      strides = 1,
                      padding = 'same')(x)

    # 随机杀死神经元
    x = layers.Dropout(rate = dropout_rate)(x)

    return x

#（2）构建DenseBlock的多个卷积组合在一起的卷积块
def DenseBlock(x, num, growth_rate):

    # 重复执行多少次DenseLayer
    for _ in range(num):
        conv = DenseLayer(x, growth_rate)
        # 将前面所有层的特征堆叠后传到下一层
        x = layers.Concatenate()([x, conv])
    
    return x


#（3）Transition层连接两个相邻的DenseBlock
#（3）Transition layer connecting two adjacent DenseBlocks
def Transition(x, compression_rate=0.5):
    """Compress channels with a 1x1 conv, then halve the spatial resolution.

    `compression_rate` scales the channel count (default keeps half of them).
    """
    # Number of output channels after compression
    filters = int(compression_rate * x.shape[-1])

    # BN + ReLU + 1x1 conv + 2x2 average pooling
    y = layers.BatchNormalization()(x)
    y = layers.Activation('relu')(y)
    y = layers.Conv2D(filters, kernel_size=(1, 1), strides=1, padding='same')(y)
    # 2x2 average pooling with stride 2 downsamples H and W by half
    y = layers.AveragePooling2D((2, 2), strides=2, padding='same')(y)
    return y


#（4）主干网络架构
#（4）Backbone network architecture
def densenet(input_shape, classes, growth_rate, include_top):
    """Build a DenseNet-121-style network with the functional API.

    Args:
        input_shape: input image shape, e.g. [224, 224, 3].
        classes: number of output classes (used only when include_top is truthy).
        growth_rate: feature maps added by each dense unit (k in the paper).
        include_top: whether to append global pooling + a logits Dense head.

    Returns:
        A keras Model mapping the input image to class logits (include_top)
        or to the final 7x7 feature map (feature-extractor mode).
    """
    # Input layer, e.g. [224,224,3]
    inputs = keras.Input(shape=input_shape)

    # Stem: 7x7/2 conv, [224,224,3] ==> [112,112,2k]
    x = layers.Conv2D(filters=2 * growth_rate,  # stem width is twice the growth rate
                      kernel_size=(7, 7),
                      strides=2,
                      padding='same')(inputs)
    x = layers.BatchNormalization()(x)
    x = layers.Activation('relu')(x)

    # 3x3/2 max pooling, [112,112,64] ==> [56,56,64]
    x = layers.MaxPooling2D(pool_size=(3, 3),
                            strides=2,
                            padding='same')(x)

    # Dense blocks of 6/12/24/16 units (DenseNet-121 layout),
    # each followed by a channel-compressing, downsampling Transition.
    # [56,56,64] ==> [56,56,64+6*k]
    x = DenseBlock(x, num=6,  growth_rate=growth_rate)
    # [56,56,256] ==> [28,28,128]
    x = Transition(x)
    # [28,28,128] ==> [28,28,128+12*k]
    x = DenseBlock(x, num=12, growth_rate=growth_rate)
    # [28,28,512] ==> [14,14,256]
    x = Transition(x)
    # [14,14,256] ==> [14,14,256+24*k]
    x = DenseBlock(x, num=24, growth_rate=growth_rate)
    # [14,14,1024] ==> [7,7,512]
    x = Transition(x)
    # [7,7,512] ==> [7,7,512+16*k]
    x = DenseBlock(x, num=16, growth_rate=growth_rate)

    # Optional classification head.
    # FIX: was `include_top is True`, an identity check that silently skips the
    # head for truthy non-`True` values (e.g. 1, numpy bool); use truthiness.
    if include_top:
        # [7,7,1024] ==> [None,1024]
        x = layers.GlobalAveragePooling2D()(x)
        # [None,1024] ==> [None,classes] — raw logits, no softmax applied
        x = layers.Dense(classes)(x)

    # Assemble the model
    model = Model(inputs, x)

    return model

#（5）接收网络模型
#（5）Build and inspect the model
if __name__ == '__main__':

    # 1000-way classifier over 224x224 RGB input, growth rate k=32, with head
    model = densenet(input_shape=[224,224,3],  # input image shape
                     classes = 1000,  # number of classes
                     growth_rate = 32,  # growth rate: channels emitted per dense unit
                     include_top = True)  # include the classification head

    model.summary()  # print the architecture
    print(len(model.layers))  # report the total layer count

        
    
    
    