"""
dense net in pytorch
https://arxiv.org/abs/1608.06993v5
"""

import torch
import torch.nn as nn


class DenseLayer(nn.Module):
    """Bottleneck dense layer: BN-ReLU-Conv(1x1) then BN-ReLU-Conv(3x3).

    The 1x1 convolution widens the input to ``growth_rate * scale``
    channels (the bottleneck width); the 3x3 convolution then produces
    ``growth_rate`` new feature maps, which are concatenated onto the
    layer input (dense connectivity).
    """

    def __init__(self, in_channels, growth_rate, scale):
        super().__init__()
        bottleneck_width = growth_rate * scale
        self.conv1 = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels, bottleneck_width, kernel_size=1, stride=1, bias=False),
        )
        self.conv2 = nn.Sequential(
            nn.BatchNorm2d(bottleneck_width),
            nn.ReLU(inplace=True),
            nn.Conv2d(bottleneck_width, growth_rate, kernel_size=3, stride=1, padding=1, bias=False),
        )

    def forward(self, x):
        # Append the newly computed feature maps to the input along the
        # channel dimension; output has in_channels + growth_rate channels.
        new_features = self.conv2(self.conv1(x))
        return torch.cat([x, new_features], dim=1)


class DenseBlock(nn.Module):
    """A stack of ``number_layers`` DenseLayers.

    Each layer adds ``growth_rate`` channels, so layer ``i`` receives
    ``in_channels + i * growth_rate`` input channels and the block emits
    ``in_channels + number_layers * growth_rate`` channels in total.
    """

    def __init__(self, number_layers, in_channels, growth_rate, scale):
        super().__init__()
        self.block = nn.Sequential(
            *[
                DenseLayer(in_channels + idx * growth_rate, growth_rate, scale)
                for idx in range(number_layers)
            ]
        )

    def forward(self, x):
        return self.block(x)


# The layers between dense blocks are called transition layers; they
# perform convolution and pooling.
class Transition(nn.Module):
    """Transition layer between dense blocks.

    BN-ReLU-Conv(1x1) compresses the channel count to ``out_channels``,
    and a 2x2 average pooling halves the spatial resolution.
    """

    def __init__(self, in_channels, out_channels):
        super().__init__()
        self.trans = nn.Sequential(
            nn.BatchNorm2d(in_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.AvgPool2d(kernel_size=2, stride=2),
        )

    def forward(self, x):
        return self.trans(x)


class DenseNet(nn.Module):
    """DenseNet (Huang et al., https://arxiv.org/abs/1608.06993).

    Args:
        block_config: number of dense layers in each dense block,
            e.g. [6, 12, 24, 16] for DenseNet-121.
        init_in_channels: channel count produced by the stem convolution.
        growth_rate: feature maps added by every dense layer.
        reduction: channel compression factor applied by each transition
            layer; 0.5 is the "DenseNet-BC" setting from the paper.
        scale: bottleneck width multiplier inside each dense layer.
        num_class: number of output classes of the final linear layer.
    """

    def __init__(self, block_config, init_in_channels, growth_rate=32, reduction=0.5, scale=4, num_class=1000):
        super().__init__()

        # Stem: 7x7/2 convolution + 3x3/2 max pooling -> spatial size / 4.
        self.features = nn.Sequential(
            # [B, 3, 224, 224]
            # h_out = (h_in - kernel_size + 2*padding)/stride + 1
            nn.Conv2d(3, init_in_channels, kernel_size=7, stride=2, padding=3, bias=False),
            # [B, init_in_channels, 112, 112]
            nn.BatchNorm2d(init_in_channels),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
            # [B, init_in_channels, 56, 56]
        )

        # Running channel count as blocks and transitions are stacked.
        features_number = init_in_channels
        layers = []
        for i, num_layers in enumerate(block_config):
            layers.append(DenseBlock(num_layers, features_number, growth_rate, scale))
            # Every dense layer contributes growth_rate new channels.
            features_number = features_number + num_layers * growth_rate

            # No transition after the final dense block.
            if i != len(block_config) - 1:
                # BUG FIX: `reduction` was previously ignored and the
                # transition always halved the channels. Honor the
                # compression factor here; since int(f * 0.5) == f // 2
                # for non-negative f, the default behavior is unchanged.
                out_channels = int(features_number * reduction)
                layers.append(Transition(features_number, out_channels))
                features_number = out_channels

        layers.append(nn.BatchNorm2d(features_number))
        self.block = nn.Sequential(*layers)
        self.relu = nn.ReLU(inplace=True)
        # Collapses any spatial size down to 1x1, so inputs other than
        # 224x224 are also accepted.
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))
        self.classifier = nn.Linear(features_number, num_class)

    def forward(self, x):
        """Return class logits of shape [B, num_class]."""
        output = self.features(x)
        output = self.block(output)
        output = self.relu(output)
        output = self.avg_pool(output)
        output = output.view(output.size(0), -1)
        output = self.classifier(output)
        return output


def densenet121(num_class=1000):
    """DenseNet-121: blocks [6, 12, 24, 16], growth rate 32."""
    block_config = [6, 12, 24, 16]
    return DenseNet(block_config, init_in_channels=64, growth_rate=32, num_class=num_class)


def densenet161(num_class=1000):
    """DenseNet-161: blocks [6, 12, 36, 24], growth rate 48, wider stem."""
    block_config = [6, 12, 36, 24]
    return DenseNet(block_config, init_in_channels=96, growth_rate=48, num_class=num_class)


def densenet169(num_class=1000):
    """DenseNet-169: blocks [6, 12, 32, 32], growth rate 32."""
    block_config = [6, 12, 32, 32]
    return DenseNet(block_config, init_in_channels=64, growth_rate=32, num_class=num_class)


def densenet201(num_class=1000):
    """DenseNet-201: blocks [6, 12, 48, 32], growth rate 32."""
    block_config = [6, 12, 48, 32]
    return DenseNet(block_config, init_in_channels=64, growth_rate=32, num_class=num_class)
