import torch.nn as nn
import torch.nn.functional as F
import torch
# Environment sanity check: report the installed PyTorch build and whether CUDA is usable.
torch_build = torch.__version__
cuda_available = torch.cuda.is_available()
print(torch_build)
print(cuda_available)


# Residual block
class ResidualBlock(nn.Module):
    """Basic 3x3 residual block (ResNet style).

    Two conv->BN stages (with a ReLU between them), added to a shortcut and
    passed through a final ReLU. When ``stride != 1`` or the channel count
    changes, a 1x1 conv projection aligns the shortcut ("conv block");
    otherwise the shortcut is the identity ("identity block").

    Args:
        in_channel: input channel count.
        out_channel: output channel count.
        stride: stride of the first conv (downsamples when > 1), default 1.
    """

    def __init__(self, in_channel, out_channel, stride=1):
        super(ResidualBlock, self).__init__()
        self.channel = in_channel  # kept for external introspection; unused internally
        # bias=False on both convs: each is immediately followed by BatchNorm,
        # which cancels any additive bias — consistent with the shortcut conv below.
        self.res1 = nn.Sequential(
            nn.Conv2d(in_channel, out_channel, kernel_size=3, stride=stride, padding=1, bias=False),  # padding=1 keeps spatial size (for stride=1)
            nn.BatchNorm2d(out_channel),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channel, out_channel, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(out_channel)
        )
        # Identity shortcut unless the residual path changes shape; then
        # project with a strided 1x1 conv so the addition is well-defined.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_channel != out_channel:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channel, out_channel,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(out_channel)
            )

    def forward(self, x):
        """Return ``relu(res1(x) + shortcut(x))``."""
        out = self.res1(x)
        out = out + self.shortcut(x)
        out = F.relu(out)
        return out

# SKNet Block
class SKConv(nn.Module):
    """Selective-Kernel convolution unit (SKNet block).

    Runs M parallel conv branches with growing kernel sizes (3, 5, ...),
    then fuses them with per-channel soft attention computed from the
    globally pooled sum of all branch outputs.

    Args:
        features: input/output channel dimensionality.
        WH: input spatial size; unused (kept for interface compatibility
            with the original GAP-based variant).
        M: number of branches.
        G: number of convolution groups per branch.
        r: reduction ratio used to size the bottleneck vector z.
        stride: branch conv stride, default 1.
        L: lower bound on the bottleneck dimension of z, default 32.
    """

    def __init__(self, features, WH, M, G, r, stride=1, L=32):
        super(SKConv, self).__init__()
        d = max(int(features / r), L)  # bottleneck width for the compact vector z
        self.M = M
        self.features = features
        # Branch i: kernel 3 + 2*i with padding 1 + i, so spatial size depends
        # only on stride and is identical across branches.
        self.convs = nn.ModuleList(
            nn.Sequential(
                nn.Conv2d(features, features, kernel_size=3 + 2 * i,
                          stride=stride, padding=1 + i, groups=G),
                nn.BatchNorm2d(features),
                nn.ReLU(inplace=False)
            )
            for i in range(M)
        )
        self.fc = nn.Linear(features, d)  # squeeze: C -> d
        self.fcs = nn.ModuleList(nn.Linear(d, features) for _ in range(M))  # excite: d -> C, one per branch
        self.softmax = nn.Softmax(dim=1)  # normalize attention across branches

    def forward(self, x):
        # All branch outputs stacked on a new branch axis: (B, M, C, H', W').
        branch_maps = torch.stack([branch(x) for branch in self.convs], dim=1)
        fused = branch_maps.sum(dim=1)        # element-wise fuse across branches
        squeezed = fused.mean(dim=(-2, -1))   # global average pool -> (B, C)
        compact = self.fc(squeezed)           # (B, d)
        # Per-branch attention logits stacked on the branch axis: (B, M, C).
        logits = torch.stack([proj(compact) for proj in self.fcs], dim=1)
        # Broadcastable weights: (B, M, C, 1, 1).
        weights = self.softmax(logits).unsqueeze(-1).unsqueeze(-1)
        return (branch_maps * weights).sum(dim=1)

# Residual network
class ResNet(nn.Module):
    """ResNet-18-style backbone for 32x32 RGB inputs (e.g. CIFAR-100) with
    SKConv attention units interleaved between the residual stages.

    forward(x): (N, 3, 32, 32) -> (N, 100) class logits.
    """

    def __init__(self):
        super(ResNet, self).__init__()
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)  # [3,32,32] -> [64,32,32]
        self.bn1 = nn.BatchNorm2d(64)

        self.skn1 = SKConv(64, 32, 2, 8, 2)
        self.skn2 = SKConv(64, 32, 2, 8, 2)
        self.reslayer1_1 = ResidualBlock(64, 64)  # [64,32,32]
        self.reslayer1_2 = ResidualBlock(64, 64)  # [64,32,32]

        self.reslayer2_1 = ResidualBlock(64, 128, 2)  # [64,32,32] -> [128,16,16]
        self.reslayer2_2 = ResidualBlock(128, 128)    # [128,16,16]
        self.skn3 = SKConv(128, 32, 2, 8, 2)

        self.reslayer3_1 = ResidualBlock(128, 256, 2)  # [128,16,16] -> [256,8,8]
        self.reslayer3_2 = ResidualBlock(256, 256)     # [256,8,8]
        self.skn4 = SKConv(256, 32, 2, 8, 2)

        self.reslayer4_1 = ResidualBlock(256, 512, 2)  # [256,8,8] -> [512,4,4]
        self.reslayer4_2 = ResidualBlock(512, 512)     # [512,4,4]
        self.skn5 = SKConv(512, 32, 2, 8, 2)
        self.pool = nn.AvgPool2d(4)  # [512,4,4] -> [512,1,1]

        self.fc = nn.Linear(512, 100)

    def forward(self, x):
        # Stem: conv -> BN -> ReLU (the activation was previously missing).
        out = F.relu(self.bn1(self.conv1(x)))
        # Stage 1 (64 ch, 32x32). The reslayer1_* and the second blocks of each
        # stage below were constructed but never called before (dead parameters);
        # wire them in per the shape comments in __init__.
        out = self.skn1(out)
        out = self.reslayer1_1(out)
        out = self.reslayer1_2(out)
        out = self.skn2(out)
        # Stage 2 (128 ch, 16x16).
        out = self.reslayer2_1(out)
        out = self.reslayer2_2(out)
        out = self.skn3(out)
        # Stage 3 (256 ch, 8x8).
        out = self.reslayer3_1(out)
        out = self.reslayer3_2(out)
        out = self.skn4(out)
        # Stage 4 (512 ch, 4x4).
        out = self.reslayer4_1(out)
        out = self.reslayer4_2(out)
        out = self.skn5(out)
        # Head: global pool, flatten, linear classifier.
        out = self.pool(out)
        out = out.view(out.size(0), -1)
        out = self.fc(out)
        return out


# Build the model, show its structure, and report the parameter count.
net = ResNet()
print(net)
net = net.cpu()  # explicit CPU placement (no-op unless previously moved)
param_count = sum(p.numel() for p in net.parameters())
# Fixed typo in the message ("paramerters") and dropped the stray trailing spaces.
print("Total number of parameters in the network is {}".format(param_count))