# TODO: backbone: densenetBC-169构建
# DATE: 2023/3/11
# AUTHOR: Cheng Ze WUST

import torch
import torch.nn as nn
import torch.nn.functional as F
from collections import OrderedDict
import numpy as np


# DenseNet-BC-169 configuration: the number of dense layers inside each of the
# 4 dense blocks (each dense layer contains two convolutions: 1x1 + 3x3).
block_num = [6, 12, 32, 32]


class denselayer(nn.Sequential):
    """Single DenseNet-BC layer: BN -> ReLU -> 1x1 conv (bottleneck) -> BN -> ReLU -> 3x3 conv.

    The 1x1 bottleneck expands to ``bn_size * growth_rate`` channels, then the
    3x3 conv produces ``growth_rate`` new feature maps, which are concatenated
    with the input along the channel dimension (dense connectivity).

    Args:
        inchannel: number of input channels.
        growth_rate: number of feature maps produced by this layer.
        bn_size: bottleneck width multiplier for the 1x1 conv.
        drop_rate: dropout probability applied to the new features (0 disables).
    """

    def __init__(self, inchannel, growth_rate, bn_size, drop_rate=0):
        super(denselayer, self).__init__()
        self.add_module("bn1", nn.BatchNorm2d(inchannel))
        self.add_module("relu1", nn.ReLU(inplace=True))
        self.add_module("conv1", nn.Conv2d(inchannel, bn_size * growth_rate, kernel_size=1, stride=1, bias=False))
        self.add_module("bn2", nn.BatchNorm2d(bn_size * growth_rate))
        self.add_module("relu2", nn.ReLU(inplace=True))
        self.add_module("conv2",
                        nn.Conv2d(bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=False))

        self.drop_rate = drop_rate

    def forward(self, x):
        new_features = super(denselayer, self).forward(x)
        if self.drop_rate > 0:
            # BUGFIX: pass training=self.training so dropout is disabled in
            # eval mode; the original applied dropout unconditionally, making
            # inference stochastic whenever drop_rate > 0.
            new_features = F.dropout(new_features, p=self.drop_rate, training=self.training)
        # Concatenate input and new features along the channel dimension.
        return torch.cat([x, new_features], 1)


class DenseNet(nn.Module):
    """DenseNet-BC backbone (DenseNet-169 layout when ``layers == [6, 12, 32, 32]``).

    ``forward`` returns the output of the last dense block together with a list
    of intermediate feature maps tapped from selected layers inside that block.

    NOTE(review): the ``channels`` bookkeeping below is hard-coded for
    growth_rate=32 and this exact 4-block layout — confirm before passing a
    different ``layers`` configuration.
    """

    def __init__(self, layers):
        """
        Args:
            layers: number of dense layers per dense block, e.g. ``[6, 12, 32, 32]``.
        """
        super(DenseNet, self).__init__()
        self.initchannel = 64
        # Stem block: BN -> (Conv s2, BN, ReLU) -> (Conv, BN, ReLU) -> Conv -> AvgPool.
        # Overall spatial stride 4 (stride-2 conv + 2x2 average pool).
        self.initlayer = nn.Sequential(
            nn.BatchNorm2d(3),
            nn.Conv2d(3, self.initchannel, kernel_size=3, stride=2, padding=1),
            nn.BatchNorm2d(self.initchannel),
            nn.ReLU(inplace=True),
            nn.Conv2d(self.initchannel, self.initchannel, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(self.initchannel),
            nn.ReLU(inplace=True),
            nn.Conv2d(self.initchannel, self.initchannel, kernel_size=3, stride=1, padding=1),
            nn.AvgPool2d(kernel_size=2, stride=2)
        )

        # Input channel count of each dense block / transition layer, in order.
        channels = [64, 256, 128, 512, 256, 1280, 640, 1664]
        # DenseBlock(1): 64,75,75 -> 256,75,75
        self.denseblock1 = self._make_layer(channels[0], layers[0])
        # Transition(1): 256,75,75 -> 128,37,37
        self.transition1 = self._make_layer_t(channels[1])
        # DenseBlock(2): 128,37,37 -> 512,37,37
        self.denseblock2 = self._make_layer(channels[2], layers[1])
        # Transition(2): 512,37,37 -> 256,18,18
        self.transition2 = self._make_layer_t(channels[3])
        # DenseBlock(3): 256,18,18 -> 1280,18,18
        self.denseblock3 = self._make_layer(channels[4], layers[2])
        # Transition(3): 1280,18,18 -> 640,9,9
        self.transition3 = self._make_layer_t(channels[5])
        # DenseBlock(4): 640,9,9 -> 1664,9,9
        self.denseblock4 = self._make_layer(channels[6], layers[3])

    def _make_layer(self, inchannel, layer_num):
        """Stack ``layer_num`` denselayers; each adds ``growth_rate`` channels."""
        growth_rate = 32
        layers = []
        for i in range(layer_num):
            layers.append(("dense_{}".format(i),
                           denselayer(inchannel + i * growth_rate, growth_rate, bn_size=4)))
        # BUGFIX: removed the original "layer_num += layer_num * growth_rate"
        # inside the loop — it was dead code (range() evaluates its bound once)
        # and mutating the loop bound was clearly unintended.
        return nn.Sequential(OrderedDict(layers))

    def _make_layer_t(self, channel):
        """Transition layer: BN -> ReLU -> 1x1 conv (halve channels) -> 2x2 avg pool."""
        layers = [("transition_bn", nn.BatchNorm2d(channel)),
                  ("transition_relu", nn.ReLU(inplace=True)),
                  ("transition_conv", nn.Conv2d(channel, int(channel * 0.5), kernel_size=1)),
                  ("transition_pool", nn.AvgPool2d(kernel_size=2, stride=2))]
        return nn.Sequential(OrderedDict(layers))

    def forward(self, x):
        """Return ``(final_feature_map, feature_list)``.

        ``feature_list`` collects the outputs of the named layers in
        ``tap_names`` while denseblock4 is executed layer by layer.
        """
        x = self.initlayer(x)

        x = self.denseblock1(x)
        x = self.transition1(x)
        x = self.denseblock2(x)
        x = self.transition2(x)
        x = self.denseblock3(x)
        x = self.transition3(x)

        # Hoisted out of the loop (originally rebuilt every iteration) and
        # turned into a set for O(1) membership tests.
        tap_names = {'dense_4', 'dense_9', 'dense_14', 'dense_19', 'dense_24', 'dense_31'}
        feature_list = []
        for name, module in self.denseblock4.named_children():
            x = module(x)
            if name in tap_names:
                feature_list.append(x)

        return x, feature_list


def densenet169(pretrained=False, model_path="model_data/..."):
    """Build a DenseNet-169 backbone, optionally loading pretrained weights.

    Args:
        pretrained: if True, load weights from ``model_path``, keeping only
            the parameters whose names and shapes match the fresh model.
        model_path: checkpoint path. New optional parameter (backward
            compatible); the default keeps the original hard-coded
            placeholder — point it at a real checkpoint before use.

    Returns:
        DenseNet: the (optionally partially pre-loaded) backbone.
    """
    model = DenseNet(block_num)
    print("Load densenet...")
    if pretrained:
        print("Load pretrained densenet...")
        _device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        model_dict = model.state_dict()
        pretrained_dict = torch.load(model_path, map_location=_device)
        load_key, no_load_key, temp_dict = [], [], {}
        for k, v in pretrained_dict.items():
            # Keep only weights whose name AND shape match the current model,
            # so a checkpoint with a different head still partially loads.
            if k in model_dict and np.shape(model_dict[k]) == np.shape(v):
                temp_dict[k] = v
                load_key.append(k)
            else:
                no_load_key.append(k)
        model_dict.update(temp_dict)
        # strict=False: named explicitly (original passed a bare positional False).
        model.load_state_dict(model_dict, strict=False)
        print("\nSuccessful Load Key:", str(load_key)[:500], "……\nSuccessful Load Key Num:", len(load_key))
        print("\nFail To Load Key:", str(no_load_key)[:500], "……\nFail To Load Key num:", len(no_load_key))
    return model


if __name__ == "__main__":
    net = densenet169(pretrained=False)

    # Inspect the network structure.
    print(net)

    # Per-layer parameter/FLOP statistics (optional).
    # BUGFIX: torchstat was imported unconditionally even though its only use
    # (the stat() call) is commented out — the import alone crashed the script
    # when torchstat is not installed. Re-enable both lines together.
    # from torchstat import stat
    # stat(net, input_size=(3, 300, 300))
