'''VGG11/13/16/19 in Pytorch.'''
from torch import optim
import torch
import torch.nn as nn
import torch.nn.functional as F
from .attention import cbam_block, se_block, eca_block
from thop import profile
from torchstat import stat

class Vgg16_net(nn.Module):
    """VGG16-style CNN for single-channel (grayscale) images, with an
    optional channel-attention module (SE / CBAM / ECA) applied after the
    last three convolutional stages.

    The five conv stages each halve the spatial resolution (MaxPool 2x2),
    so an HxW input reaches the classifier as (batch, 512, H/32, W/32);
    the fc head expects the flattened feature size to be 512, i.e. a
    48x48 input (48 -> 24 -> 12 -> 6 -> 3 -> 1 after integer pooling).

    Args:
        num_classes: output size of the final classification layer.
        attention_name: None for no attention, "se" for squeeze-excitation,
            "cbam" for CBAM; any other non-None value falls through to ECA
            (kept for backward compatibility — pass "eca" explicitly).
    """

    def __init__(self, num_classes=7, attention_name=None):
        super(Vgg16_net, self).__init__()

        # Stage 1: 1 -> 64 channels, spatial /2.
        self.layer1 = nn.Sequential(
            nn.Conv2d(in_channels=1, out_channels=64, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(64),
            nn.ReLU(inplace=True),

            nn.MaxPool2d(kernel_size=2, stride=2),
        )

        # Stage 2: 64 -> 128 channels, spatial /2.
        self.layer2 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=128, out_channels=128, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(128),
            nn.ReLU(inplace=True),

            nn.MaxPool2d(2, 2),
        )

        # Stage 3: 128 -> 256 channels (three convs), spatial /2.
        self.layer3 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(256),
            nn.ReLU(inplace=True),

            nn.MaxPool2d(2, 2),
        )

        # Stage 4: 256 -> 512 channels (three convs), spatial /2.
        self.layer4 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.MaxPool2d(2, 2),
        )

        # Stage 5: 512 -> 512 channels (three convs), spatial /2.
        self.layer5 = nn.Sequential(
            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.Conv2d(in_channels=512, out_channels=512, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm2d(512),
            nn.ReLU(inplace=True),

            nn.MaxPool2d(2, 2),
        )

        # Wrap the stages so forward() can index them; the modules are
        # shared with self.layer1..layer5 (same parameters, both names
        # appear in the state_dict as before).
        self.conv = nn.Sequential(
            self.layer1,
            self.layer2,
            self.layer3,
            self.layer4,
            self.layer5,
        )

        # Classifier head. NOTE(review): standard VGG places a ReLU between
        # the first Linear and its Dropout; it is omitted here. Adding it
        # would shift the fc.* state_dict keys and break existing
        # checkpoints, so the structure is left unchanged.
        self.fc = nn.Sequential(
            nn.Linear(512, 512),
            nn.Dropout(0.5),

            nn.Linear(512, 256),
            nn.ReLU(inplace=True),
            nn.Dropout(0.5),

            nn.Linear(256, num_classes),
        )
        self.attention_name = attention_name

        if attention_name is not None:
            # Channel counts match the outputs of layer3/layer4/layer5.
            if attention_name == "se":
                self.seAttention1 = se_block(256)
                self.seAttention2 = se_block(512)
                self.seAttention3 = se_block(512)
            elif attention_name == "cbam":
                self.seAttention1 = cbam_block(256)
                self.seAttention2 = cbam_block(512)
                self.seAttention3 = cbam_block(512)
            else:
                # Any other non-None value is treated as ECA (legacy
                # behavior — no validation of the string is performed).
                self.seAttention1 = eca_block(256)
                self.seAttention2 = eca_block(512)
                self.seAttention3 = eca_block(512)

    def forward(self, x):
        """Run the network.

        Args:
            x: input tensor of shape (batch, 1, H, W); H = W = 48 is
                required for the flattened features to match the fc head.

        Returns:
            Logits of shape (batch, num_classes).
        """
        x = self.conv[0](x)
        x = self.conv[1](x)
        if self.attention_name is not None:
            # Interleave attention after each of the last three stages.
            x = self.conv[2](x)
            x = self.seAttention1(x)

            x = self.conv[3](x)
            x = self.seAttention2(x)

            x = self.conv[4](x)
            x = self.seAttention3(x)
        else:
            x = self.conv[2](x)
            x = self.conv[3](x)
            x = self.conv[4](x)

        x = x.view(x.size(0), -1)  # flatten to (batch, 512) for 48x48 input
        x = self.fc(x)
        return x


if __name__ == '__main__':
    # Smoke test: push one random batch of 48x48 grayscale images through
    # the network and confirm the output shape is (batch, num_classes).
    net = Vgg16_net(num_classes=7)
    dummy_batch = torch.randn(64, 1, 48, 48)
    print(net(dummy_batch).shape)