import torch
from torch import nn
from torch.nn import Conv2d, BatchNorm2d, ReLU, Sequential, AdaptiveAvgPool2d, Linear


class SKConv(nn.Module):
    """Selective Kernel convolution unit (SKNet).

    Runs M parallel convolution branches with growing kernel sizes,
    fuses them by summation, and re-weights each branch with a softmax
    attention vector computed from the fused feature map.
    """

    def __init__(self, features, WH, M, G, r, stride=1, L=32):
        """
        Args:
            features: input (and output) channel dimensionality.
            WH: input spatial size; unused here (kept for interface
                compatibility with the commented-out AvgPool variant).
            M: number of parallel branches.
            G: number of groups in each branch convolution.
            r: reduction ratio used to compute d, the length of z.
            stride: convolution stride, default 1.
            L: lower bound on the dimension of z, default 32.
        """
        super(SKConv, self).__init__()
        d = max(int(features / r), L)
        self.M = M
        self.features = features
        # Branch i uses kernel 3 + 2*i with padding 1 + i so every branch
        # produces the same output spatial size.
        self.convs = nn.ModuleList(
            nn.Sequential(
                nn.Conv2d(features,
                          features,
                          kernel_size=3 + 2 * branch,
                          stride=stride,
                          padding=1 + branch,
                          groups=G, bias=False),
                nn.BatchNorm2d(features),
                nn.ReLU(inplace=False),
            )
            for branch in range(M)
        )
        # self.gap = nn.AvgPool2d(int(WH/stride))
        self.fc = nn.Linear(features, d)
        self.fcs = nn.ModuleList(nn.Linear(d, features) for _ in range(M))
        self.softmax = nn.Softmax(dim=1)

    def forward(self, x):
        # Stack branch outputs into (B, M, C, H, W).
        feas = torch.stack([branch(x) for branch in self.convs], dim=1)
        # Fuse by element-wise summation over the branch axis.
        fused = feas.sum(dim=1)
        # Global average pool over the spatial dims -> (B, C).
        squeezed = fused.mean(-1).mean(-1)
        z = self.fc(squeezed)
        # One attention vector per branch, softmax-normalized across branches.
        attn = torch.stack([fc(z) for fc in self.fcs], dim=1)
        attn = self.softmax(attn).unsqueeze(-1).unsqueeze(-1)
        # Attention-weighted sum of the branch features.
        return (feas * attn).sum(dim=1)


# Basic residual block used by ResNet-18 / ResNet-34
class BasicBlock(nn.Module):
    """Basic residual block (two 3x3 convs) used by ResNet-18/34."""

    expansion = 1  # channel count is unchanged inside the block

    def __init__(self, in_channels, out_channels, stride=1):
        super(BasicBlock, self).__init__()
        # First 3x3 conv; carries the (possibly >1) stride.
        self.conv1 = Conv2d(in_channels, out_channels, kernel_size=3,
                            stride=stride, padding=1, bias=False)
        self.bn1 = BatchNorm2d(out_channels)
        self.relu = ReLU()
        # Second 3x3 conv, always stride 1.
        self.conv2 = Conv2d(out_channels, out_channels, kernel_size=3,
                            stride=1, padding=1, bias=False)
        self.bn2 = BatchNorm2d(out_channels)

        # 1x1 projection so the identity branch matches the main branch's
        # shape whenever stride or channel count changes.
        self.downsample = None
        if stride != 1 or in_channels != out_channels:
            self.downsample = Sequential(
                Conv2d(in_channels, out_channels, kernel_size=1,
                       stride=stride, bias=False),
                BatchNorm2d(out_channels),
            )

    def forward(self, x):
        # Identity (or projected) shortcut branch.
        shortcut = x if self.downsample is None else self.downsample(x)
        # Main branch: conv -> bn -> relu -> conv -> bn.
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        # Residual addition, then the final activation.
        out += shortcut
        return self.relu(out)


# ResNet (ResNet-18 variant): conv1 changed from 7x7/stride 2/padding 3 to 3x3/stride 1/padding 1 for 32x32 inputs
class ResNet(nn.Module):
    """ResNet-18-style network for 32x32 inputs (e.g. CIFAR-100).

    conv1 is 3x3/stride 1/padding 1 (instead of the ImageNet 7x7/stride 2)
    and the initial max-pool is dropped, so 32x32 inputs are downsampled
    only by the three strided stages.
    """

    def __init__(self, block, num_classes=100):
        """
        Args:
            block: residual block class; must expose an ``expansion``
                class attribute and accept (in_channels, out_channels, stride).
            num_classes: size of the final classification layer.
        """
        super(ResNet, self).__init__()
        self.in_channel = 64
        # conv1:    32x32x3 -> 32x32x64
        self.conv1 = Conv2d(3, self.in_channel, kernel_size=3, stride=1, padding=1,
                            bias=False)
        self.bn1 = BatchNorm2d(self.in_channel)
        self.relu = ReLU(inplace=True)

        # conv2_x:  32x32x64 -> 32x32x64
        # self.maxpool = MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.conv2_x = Sequential(
            block(in_channels=64, out_channels=64),
            block(in_channels=64, out_channels=64),
        )

        # conv3_x:  32x32x64 -> 16x16x128
        self.conv3_x = Sequential(
            block(in_channels=64, out_channels=128, stride=2),
            block(in_channels=128, out_channels=128),
        )

        # conv4_x:  16x16x128 -> 8x8x256
        self.conv4_x = Sequential(
            block(in_channels=128, out_channels=256, stride=2),
            block(in_channels=256, out_channels=256),
        )

        # conv5_x:  8x8x256 -> 4x4x512
        self.conv5_x = Sequential(
            block(in_channels=256, out_channels=512, stride=2),
            block(in_channels=512, out_channels=512),
        )

        # Adaptive average pool: 4x4x512 -> 1x1x512
        self.avgpool = AdaptiveAvgPool2d((1, 1))

        # Classifier: 1x1x(512*expansion) -> num_classes
        self.fc = Linear(512 * block.expansion, num_classes)

    def forward(self, x):
        output = self.conv1(x)
        output = self.bn1(output)
        output = self.relu(output)

        # output=self.maxpool(output)
        output = self.conv2_x(output)
        output = self.conv3_x(output)
        output = self.conv4_x(output)
        output = self.conv5_x(output)

        output = self.avgpool(output)
        # Flatten all non-batch dims. The previous `view(-1, 512)` hard-coded
        # 512 even though fc is sized 512 * block.expansion, and a -1 batch
        # dim could silently mis-shape the batch if the channel count ever
        # differed; flatten keeps the batch dim and adapts to any width.
        output = torch.flatten(output, 1)
        output = self.fc(output)

        return output


# ResNet18
def resnet18(num_classes=100):
    """Build a ResNet-18 from BasicBlock.

    Args:
        num_classes: size of the final classifier (default 100, matching
            the previous hard-coded value, so existing callers are
            unaffected).
    """
    return ResNet(BasicBlock, num_classes=num_classes)


# SK_ResNet
def sk_resnet():
    """ResNet-18 with an SKConv unit inserted after each conv2_x block.

    Each insertion point gets its own SKConv instance. The previous code
    inserted the SAME module object at both positions, which silently tied
    the weights of the two attention units (one set of parameters updated
    by gradients from both depths) — almost certainly unintended.
    """
    resnet = resnet18()
    resnet.conv2_x = Sequential(
        resnet.conv2_x[0],
        SKConv(64, WH=1, M=2, G=1, r=2),
        resnet.conv2_x[1],
        SKConv(64, WH=1, M=2, G=1, r=2),
    )
    return resnet
