'''Network architecture definitions (ResNet-50 backbone + CenterNet head).'''

from torch import nn






class Conv2d_ycz(nn.Module):
    """Conv2d followed by BatchNorm2d.

    Nearly every block in this network is a convolution followed by batch
    normalization, so they are bundled into one module. The ReLU activation
    is intentionally NOT applied here; callers apply it in their own
    ``forward``.
    """

    def __init__(self, filter_in, filter_out, kernel_size, stride=1, bias=False, padding=0):
        # BUG FIX: original called `super(Conv2d_ycz.self)` — a '.' typo for
        # ',' that raised AttributeError as soon as the module was built.
        super(Conv2d_ycz, self).__init__()
        self.conv1 = nn.Conv2d(filter_in, filter_out, kernel_size=kernel_size, stride=stride, padding=padding,
                               bias=bias)
        self.bn = nn.BatchNorm2d(filter_out)

    def forward(self, x):
        # Conv -> BN; no activation (applied by the caller).
        out = self.conv1(x)
        out = self.bn(out)
        return out


class ConvBlock(nn.Module):
    """Bottleneck residual block with a projection shortcut (ResNet "conv block").

    Main path: 1x1 reduce -> 3x3 -> 1x1 expand. The shortcut is a strided
    1x1 projection so channels and spatial size match before the addition.
    """

    def __init__(self, inplanes, planes, stride=1, expansion=4):
        super(ConvBlock, self).__init__()
        # BUG FIX: the original used `planes / 4` (float division — invalid
        # as a channel count) and wired the wrong input widths into
        # Conv2/Conv3. Chain the bottleneck width through the three convs.
        width = planes // expansion
        # 1x1 reduction; carries the block's stride, as in the original.
        self.Conv1 = Conv2d_ycz(inplanes, width, kernel_size=1, stride=stride, bias=False)
        # 3x3 at bottleneck width.
        self.Conv2 = Conv2d_ycz(width, width, kernel_size=3, stride=1, padding=1, bias=False)
        # 1x1 expansion back to `planes`.
        self.Conv3 = Conv2d_ycz(width, planes, kernel_size=1, bias=False)

        # Projection shortcut: match the main path's channels and stride.
        self.Conv4 = Conv2d_ycz(inplanes, width * expansion, kernel_size=1, stride=stride, bias=False)

        # BUG FIX: attribute was defined as `self.RuLu` but referenced as
        # `self.ReLU` in forward(), raising AttributeError at runtime.
        self.ReLU = nn.ReLU(inplace=True)

    def forward(self, x):
        # Standard bottleneck: no activation on the shortcut or on the last
        # conv before the addition; a single ReLU after the sum.
        residual = self.Conv4(x)

        out = self.ReLU(self.Conv1(x))
        out = self.ReLU(self.Conv2(out))
        out = self.Conv3(out)

        out = out + residual
        out = self.ReLU(out)
        return out


class IdentityBlock(nn.Module):
    """Bottleneck residual block with an identity shortcut (ResNet "identity block").

    Input and output must have the same shape (requires ``inplanes == planes``
    and ``stride == 1``) because the input is added back directly.
    """

    def __init__(self, inplanes, planes, stride=1):
        # BUG FIX: super() was called with the wrong class name (ConvBlock).
        super(IdentityBlock, self).__init__()
        # BUG FIX: `planes / 4` float division and mis-wired channel widths;
        # chain the bottleneck width through the three convolutions.
        width = planes // 4
        self.Conv1 = Conv2d_ycz(inplanes, width, kernel_size=1, stride=stride, bias=False)
        self.Conv2 = Conv2d_ycz(width, width, kernel_size=3, stride=1, padding=1, bias=False)
        self.Conv3 = Conv2d_ycz(width, planes, kernel_size=1, bias=False)

        # BUG FIX: attribute was `self.RuLu` but forward() used `self.ReLU`.
        self.ReLU = nn.ReLU(inplace=True)

    def forward(self, x):
        residual = x

        out = self.ReLU(self.Conv1(x))
        out = self.ReLU(self.Conv2(out))
        # No activation before the residual addition (standard bottleneck).
        out = self.Conv3(out)

        out = out + residual
        out = self.ReLU(out)
        return out


class ResNet50(nn.Module):
    """ResNet-50: stem + four stages of bottleneck blocks + classifier head.

    Stage layout is [3, 4, 6, 3]; each stage begins with a ConvBlock
    (projection shortcut, strided from stage 2 on) followed by
    IdentityBlocks that keep the shape unchanged.
    """

    def __init__(self, expansion=4, num_classes=1000):
        super(ResNet50, self).__init__()

        self.layers = [3, 4, 6, 3]
        self.inplanes = 64
        self.ReLU = nn.ReLU(inplace=True)
        # 512,512,3 -> 256,256,64
        self.Conv1 = Conv2d_ycz(3, 64, kernel_size=7, stride=2, padding=3, bias=False)

        # 256x256x64 -> 128x128x64
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=0, ceil_mode=True)  # change

        # 128x128x64 -> 128x128x256
        self.ConvBlock1 = ConvBlock(inplanes=64, planes=256)
        self.IdentityBlock11 = IdentityBlock(inplanes=256, planes=256)
        self.IdentityBlock12 = IdentityBlock(inplanes=256, planes=256)

        # 128x128x256 -> 64x64x512
        # BUG FIX: the identity blocks were built with stride=2. A strided
        # identity block shrinks `out` but not its residual, breaking the
        # addition; only the stage's ConvBlock downsamples.
        self.ConvBlock2 = ConvBlock(inplanes=256, planes=512, stride=2)
        self.IdentityBlock21 = IdentityBlock(inplanes=512, planes=512)
        self.IdentityBlock22 = IdentityBlock(inplanes=512, planes=512)
        self.IdentityBlock23 = IdentityBlock(inplanes=512, planes=512)

        # 64x64x512 -> 32x32x1024
        self.ConvBlock3 = ConvBlock(inplanes=512, planes=1024, stride=2)
        self.IdentityBlock31 = IdentityBlock(inplanes=1024, planes=1024)
        self.IdentityBlock32 = IdentityBlock(inplanes=1024, planes=1024)
        self.IdentityBlock33 = IdentityBlock(inplanes=1024, planes=1024)
        self.IdentityBlock34 = IdentityBlock(inplanes=1024, planes=1024)
        self.IdentityBlock35 = IdentityBlock(inplanes=1024, planes=1024)

        # 32x32x1024 -> 16x16x2048
        self.ConvBlock4 = ConvBlock(inplanes=1024, planes=2048, stride=2)
        self.IdentityBlock41 = IdentityBlock(inplanes=2048, planes=2048)
        # NOTE(review): canonical ResNet-50 has two identity blocks in the
        # last stage; only one is defined here — confirm this is intended.

        self.avgpool = nn.AvgPool2d(7)
        self.fc = nn.Linear(512 * expansion, num_classes)

    def forward(self, x):
        out = self.ReLU(self.Conv1(x))
        out = self.maxpool(out)

        # 128x128x64 -> 128x128x256
        out = self.ConvBlock1(out)
        out = self.IdentityBlock11(out)
        out = self.IdentityBlock12(out)

        # 128x128x256 -> 64x64x512
        out = self.ConvBlock2(out)
        out = self.IdentityBlock21(out)
        out = self.IdentityBlock22(out)
        out = self.IdentityBlock23(out)

        # 64x64x512 -> 32x32x1024
        out = self.ConvBlock3(out)
        out = self.IdentityBlock31(out)
        out = self.IdentityBlock32(out)
        out = self.IdentityBlock33(out)
        out = self.IdentityBlock34(out)
        out = self.IdentityBlock35(out)

        # 32x32x1024 -> 16x16x2048
        out = self.ConvBlock4(out)
        out = self.IdentityBlock41(out)

        out = self.avgpool(out)
        # BUG FIX: flattened the network INPUT (`x.view`) instead of the
        # pooled features, so the whole backbone was bypassed.
        out = out.view(out.size(0), -1)
        out = self.fc(out)
        return out


def resnet50(pretrain=True, expansion=4, num_classes=1000):
    """Build a ResNet-50 and return its feature-extraction part as a Sequential.

    The classifier (avgpool + fc) is deliberately excluded: the CenterNet
    pipeline consumes the 2048-channel feature map directly.

    NOTE(review): `pretrain` is currently unused — no pretrained-weight
    loading is visible here; confirm whether weights should be loaded.
    """
    model = ResNet50(expansion=expansion, num_classes=num_classes)

    # BUG FIX: the original referenced attributes that ResNet50 never
    # defines (model.conv1, model.bn1, model.relu, model.layer1, ...),
    # which raised AttributeError. Assemble the feature extractor from the
    # attributes the class actually has, in forward() order.
    features = nn.Sequential(
        model.Conv1, model.ReLU, model.maxpool,
        model.ConvBlock1, model.IdentityBlock11, model.IdentityBlock12,
        model.ConvBlock2, model.IdentityBlock21, model.IdentityBlock22, model.IdentityBlock23,
        model.ConvBlock3, model.IdentityBlock31, model.IdentityBlock32, model.IdentityBlock33,
        model.IdentityBlock34, model.IdentityBlock35,
        model.ConvBlock4, model.IdentityBlock41,
    )
    return features


class CenterNet_UpSampling(nn.Module):
    """Three transposed-convolution upsampling stages.

    Each stage doubles the spatial size (kernel 4, stride 2, padding 1):
    16x16x`inplanes` -> 32x32x256 -> 64x64x128 -> 128x128x64.
    """

    def __init__(self, inplanes, bn_momentum=0.1):
        super(CenterNet_UpSampling, self).__init__()
        self.bn_momentum = bn_momentum
        self.inplanes = inplanes
        self.deconv_with_bias = False

        # BUG FIX: original passed `inplace=inplanes`, handing the channel
        # count to the boolean inplace flag.
        self.ReLU = nn.ReLU(inplace=True)

        # up sampling1: 16,16,inplanes -> 32,32,256
        self.UpSampling1 = nn.ConvTranspose2d(in_channels=self.inplanes, out_channels=256, kernel_size=4,
                                              stride=2, padding=1, output_padding=0, bias=self.deconv_with_bias)
        self.BatchNorm2d1 = nn.BatchNorm2d(256, momentum=self.bn_momentum)

        # up sampling2: 32,32,256 -> 64,64,128
        self.UpSampling2 = nn.ConvTranspose2d(in_channels=256, out_channels=128, kernel_size=4,
                                              stride=2, padding=1, output_padding=0, bias=self.deconv_with_bias)
        self.BatchNorm2d2 = nn.BatchNorm2d(128, momentum=self.bn_momentum)

        # up sampling3: 64,64,128 -> 128,128,64
        self.UpSampling3 = nn.ConvTranspose2d(in_channels=128, out_channels=64, kernel_size=4,
                                              stride=2, padding=1, output_padding=0, bias=self.deconv_with_bias)
        # BUG FIX: this BN was built with 256 channels but normalizes the
        # 64-channel output of UpSampling3.
        self.BatchNorm2d3 = nn.BatchNorm2d(64, momentum=self.bn_momentum)

    def forward(self, x):
        # BUG FIX: stages 2 and 3 originally consumed `x` again instead of
        # the previous stage's output, discarding the earlier upsampling
        # (and crashing on mismatched channel counts).
        out = self.ReLU(self.BatchNorm2d1(self.UpSampling1(x)))
        out = self.ReLU(self.BatchNorm2d2(self.UpSampling2(out)))
        out = self.ReLU(self.BatchNorm2d3(self.UpSampling3(out)))
        return out


class CenterHead(nn.Module):
    """CenterNet prediction heads on a 64-channel feature map.

    Produces a per-class heatmap (sigmoid-activated), a 2-channel
    width/height map, and a 2-channel center-offset map, all at the
    input's spatial resolution.
    """

    def __init__(self, num_classes=80, channel=64, bn_momentum=0.1):
        super(CenterHead, self).__init__()
        # BUG FIX (all three heads): BatchNorm2d was built with 64 channels
        # regardless of `channel`, crashing for any channel != 64.
        # Heatmap head: per-class center probabilities.
        self.cls_head = nn.Sequential(
            nn.Conv2d(64, channel,
                      kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(channel, momentum=bn_momentum),
            nn.ReLU(inplace=True),
            nn.Conv2d(channel, num_classes,
                      kernel_size=1, stride=1, padding=0))
        # Width/height regression head.
        self.wh_head = nn.Sequential(
            nn.Conv2d(64, channel,
                      kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(channel, momentum=bn_momentum),
            nn.ReLU(inplace=True),
            nn.Conv2d(channel, 2,
                      kernel_size=1, stride=1, padding=0))

        # Center-offset regression head.
        self.reg_head = nn.Sequential(
            nn.Conv2d(64, channel,
                      kernel_size=3, padding=1, bias=False),
            nn.BatchNorm2d(channel, momentum=bn_momentum),
            nn.ReLU(inplace=True),
            nn.Conv2d(channel, 2,
                      kernel_size=1, stride=1, padding=0))

    def forward(self, x):
        # In-place sigmoid squashes heatmap logits to (0, 1).
        hm = self.cls_head(x).sigmoid_()
        wh = self.wh_head(x)
        offset = self.reg_head(x)
        return hm, wh, offset


class CenterNet(nn.Module):
    """CenterNet detector: ResNet-50 features -> 3x upsampling -> prediction heads."""

    def __init__(self, num_classes=20, pretrain=False, expansion=4):
        # BUG FIX: super().__init__() was never called, so assigning
        # nn.Module attributes below raised AttributeError.
        super(CenterNet, self).__init__()
        self.backbone = resnet50(expansion=expansion, num_classes=num_classes, pretrain=pretrain)
        self.UpSampling = CenterNet_UpSampling(inplanes=2048)
        self.head = CenterHead(channel=64, num_classes=num_classes)

    def freeze_backbone(self):
        # Stop gradient flow through the backbone (e.g. for warm-up training).
        for param in self.backbone.parameters():
            param.requires_grad = False

    def unfreeze_backbone(self):
        # Re-enable backbone fine-tuning.
        for param in self.backbone.parameters():
            param.requires_grad = True

    def forward(self, x):
        feat = self.backbone(x)
        return self.head(self.UpSampling(feat))



