'''
@author: zhangkai
@license: (C) Copyright 2017-2023
@contact: jeffcobile@gmail.com
@Software : PyCharm
@file: vgg.py
@time: 2020-06-16 09:41:17
@desc: 
'''
import torch
from jjzhk.config import ZKCFG
from ELib.backbone.backbone_zoo import BACKBONE_ZOO, BackboneSeg


@BACKBONE_ZOO.register()
def vgg16(cfg: ZKCFG):
    """Registered factory: build and return the VGG-16 backbone for *cfg*."""
    backbone = VGG(cfg)
    return backbone


class VGG(BackboneSeg):
    """VGG-16 feature extractor in the SSD-style layout: pool5 is a
    stride-1 3x3 pool (keeps spatial size) and the original fc6/fc7 are
    replaced by a dilated 3x3 conv6 and a 1x1 conv7.
    """

    def __init__(self, cfg):
        super(VGG, self).__init__(cfg)
        self.cfg = cfg

    def _create_network_(self):
        """Build the VGG-16 layer list.

        Returns a plain Python list of 31 ``torch.nn`` modules in forward
        order (not wrapped in a Sequential), identical to the original
        hand-unrolled version: conv stages 64x2, 128x2, 256x3, 512x3 each
        followed by a 2x2/s2 max-pool (stage 3's pool uses ceil_mode=True),
        then stage 5 (512x3) with a 3x3/s1/p1 pool, then the dilated conv6
        and 1x1 conv7. Every conv is followed by an activation.
        """
        use_leaky = True  # LeakyReLU(0.1) activations; flip for ReLU(inplace)

        def act():
            # Activation used after every conv layer.
            return (torch.nn.LeakyReLU(negative_slope=0.1) if use_leaky
                    else torch.nn.ReLU(inplace=True))

        def conv3x3(in_ch, out_ch, padding=(1, 1), dilation=(1, 1)):
            # Standard 3x3 stride-1 convolution (bias enabled, zero padding).
            return torch.nn.Conv2d(in_ch, out_ch, kernel_size=(3, 3),
                                   stride=(1, 1), padding=padding,
                                   dilation=dilation, padding_mode='zeros')

        layers = []
        in_ch = 3

        # (out_channels, num_convs, ceil_mode of the trailing 2x2 pool).
        # Stage 3 uses ceil_mode=True so odd feature maps round up (SSD).
        stages = [(64, 2, False), (128, 2, False), (256, 3, True), (512, 3, False)]
        for out_ch, repeats, ceil in stages:
            for _ in range(repeats):
                layers += [conv3x3(in_ch, out_ch), act()]
                in_ch = out_ch
            layers += [torch.nn.MaxPool2d(kernel_size=2, stride=2, padding=0,
                                          dilation=1, ceil_mode=ceil)]

        # Stage 5: three 512-channel convs, then pool5 (3x3, stride 1,
        # padding 1) which preserves spatial resolution.
        for _ in range(3):
            layers += [conv3x3(in_ch, 512), act()]
        layers += [torch.nn.MaxPool2d(kernel_size=3, stride=1, padding=1,
                                      dilation=1, ceil_mode=False)]

        # fc6 -> conv6: 3x3 conv with dilation/padding 6 (atrous trick);
        # fc7 -> conv7: 1x1 conv.
        layers += [conv3x3(512, 1024, padding=(6, 6), dilation=(6, 6)), act()]
        layers += [torch.nn.Conv2d(1024, 1024, kernel_size=(1, 1),
                                   stride=(1, 1), padding_mode='zeros'), act()]

        return layers