import  torch
from    torch import  nn
from    torch.nn import functional as F


# Basic convolution unit:
# Conv2d (3x3 by default, stride 1, zero padding 1) -> BatchNorm2d -> ReLU.
class ConvBNRelu(nn.Module):
    """Convolution followed by batch normalization and a ReLU activation."""

    def __init__(self, ch_in, ch_out, kernel_size=3, stride=1, padding=1):
        """
        :param ch_in: number of input channels
        :param ch_out: number of output channels
        :param kernel_size: square kernel size (default 3)
        :param stride: convolution stride (default 1)
        :param padding: zero padding on each side (default 1)
        """
        super(ConvBNRelu, self).__init__()
        self.model = nn.Sequential(
            nn.Conv2d(ch_in, ch_out, kernel_size=kernel_size,
                      stride=stride, padding=padding),
            nn.BatchNorm2d(ch_out),
            nn.ReLU(),
        )

    def forward(self, x):
        """
        :param x: input tensor of shape [b, ch_in, h, w]
        :return: activated feature map of shape [b, ch_out, h', w']
        """
        return self.model(x)

# Inception block.
# Four parallel branches over the same input, concatenated along channels:
#   branch 1: 3x3 conv
#   branch 2: 5x5 conv
#   branch 3: 3x3 conv followed by a 1x1 conv (channel mixing / dimensionality reduction)
#   branch 4: 3x3 max pool followed by a 3x3 conv
# Each branch emits ch_out channels, so the block outputs 4 * ch_out channels.
class InceptionBlk(nn.Module):

    def __init__(self, ch_in, ch_out, stride=1):
        """
        :param ch_in: number of input channels
        :param ch_out: channels produced by each branch
        :param stride: spatial stride applied by every branch
        """
        super(InceptionBlk, self).__init__()

        self.ch_in = ch_in
        self.ch_out = ch_out
        self.strides = stride

        # Branch 1: 3x3 convolution.
        self.conv1 = ConvBNRelu(ch_in, ch_out, stride=stride)
        # Branch 2: 5x5 convolution; padding=2 keeps its output the same
        # spatial size as branch 1.
        self.conv2 = ConvBNRelu(ch_in, ch_out, kernel_size=5, stride=stride, padding=2)
        # Branch 3: strided 3x3 convolution, then a 1x1 convolution.
        self.conv3_1 = ConvBNRelu(ch_in, ch_in, stride=stride)
        self.conv3_2 = ConvBNRelu(ch_in, ch_out, kernel_size=1, stride=1, padding=0)
        # Branch 4: 3x3 max pooling (stride 1, size-preserving), then a
        # strided 3x3 convolution that does the downsampling.
        self.pool = nn.MaxPool2d(kernel_size=3, stride=1, padding=1)
        self.pool_conv = ConvBNRelu(ch_in, ch_out, stride=stride)

    def forward(self, x):
        """
        :param x: input tensor of shape [b, ch_in, h, w]
        :return: tensor of shape [b, 4 * ch_out, h', w']
        """
        branch1 = self.conv1(x)
        branch2 = self.conv2(x)
        # 3x3 conv chained into the 1x1 conv.
        branch3 = self.conv3_2(self.conv3_1(x))
        # Max pool chained into the 3x3 conv.
        branch4 = self.pool_conv(self.pool(x))
        # Stack the branch outputs along the channel dimension (dim=1).
        return torch.cat([branch1, branch2, branch3, branch4], dim=1)
# Full Inception-style network:
# (1) an initial convolution layer,
# (2) num_layers stages, each made of two InceptionBlk modules:
#       the first halves the spatial size (stride=2),
#       the second keeps the size of the first (stride=1);
#     the per-branch channel count doubles after every stage,
# (3) global average pooling followed by a fully connected classifier.
class Inception(nn.Module):

    # num_layers: number of inception stages; num_classes: output classes;
    # init_ch: initial channel count.
    def __init__(self, num_layers, num_classes, init_ch=16, **kwargs):
        super(Inception, self).__init__()

        # in_channels / out_channels track the running channel bookkeeping
        # while the stages are built.
        self.in_channels = init_ch
        self.out_channels = init_ch
        self.num_layers = num_layers
        self.init_ch = init_ch

        # Stem convolution.
        self.conv1 = ConvBNRelu(init_ch, init_ch)

        # Build the inception stages dynamically.
        self.blocks = nn.Sequential()
        for block_id in range(num_layers):
            for layer_id in range(2):
                # First block of a stage downsamples (larger stride => smaller
                # spatial size); the second keeps the size. Either way the
                # block emits 4 * out_channels channels (four branches
                # concatenated), which becomes the next block's input width.
                stride = 2 if layer_id == 0 else 1
                block = InceptionBlk(self.in_channels, self.out_channels, stride=stride)
                self.in_channels = self.out_channels * 4
                self.blocks.add_module(str(block_id) + str(layer_id), block)
            # Double the per-branch channel count after each stage.
            self.out_channels *= 2

        # The last block emits (out_channels / 2) * 4 == out_channels * 2
        # channels, because out_channels was already doubled at the end of
        # the loop; hence the *2 here.
        self.fc = nn.Linear(self.out_channels * 2, num_classes)

    def forward(self, x):
        """
        :param x: input tensor of shape [b, init_ch, h, w]
        :return: class logits of shape [b, num_classes]
        """
        out = self.conv1(x)
        out = self.blocks(out)
        # Adaptive global pooling: whatever the incoming spatial size, the
        # pooling window is chosen so the output is [b, c, 1, 1].
        out = F.adaptive_avg_pool2d(out, [1, 1])
        # Flatten to [b, c] for the classifier.
        out = out.view(out.size(0), -1)
        return self.fc(out)

def main():
    """Smoke-test a single inception block and the full network."""
    # One inception block: 64 -> 4*128 channels, spatial size halved by stride=2.
    block = InceptionBlk(64, 128, stride=2)
    print('block:', block(torch.randn(2, 64, 32, 32)).shape)

    # Full network: 2 stages, 10 classes, 3 input channels.
    net = Inception(2, 10, 3)
    print('inception:', net(torch.randn(2, 3, 32, 32)).shape)

if __name__ == '__main__':
    main()

