from torch import nn
from torch.nn import functional as F
import torch
from torchvision import models
# from transformer import TBAM
# from vit_sp import TBAM
from new_vit import TBAM

class ResNet18(nn.Module):
    """Plain ResNet-18 classifier.

    Wraps torchvision's ResNet-18 up to and including the global average
    pool, followed by a single linear classification head.

    Args:
        pretrained: passed straight to ``models.resnet18`` to load weights.
        num_classes: size of the output logit vector.
        drop_rate: dropout probability on the pooled features; 0 disables it.
    """

    def __init__(self, pretrained=False, num_classes=7, drop_rate=0):
        super(ResNet18, self).__init__()
        self.drop_rate = drop_rate
        resnet = models.resnet18(pretrained)
        # Everything except the final fc layer; output is (B, 512, 1, 1).
        self.features = nn.Sequential(*list(resnet.children())[:-1])
        self.fc = nn.Linear(512, num_classes)

    def forward(self, x):
        """Return the logits twice (callers unpack a 2-tuple)."""
        x = self.features(x)

        if self.drop_rate > 0:
            # BUGFIX: the original built a fresh nn.Dropout module here; a
            # newly constructed module is in training mode, so activations
            # were dropped even during evaluation. F.dropout honours
            # self.training instead.
            x = F.dropout(x, p=self.drop_rate, training=self.training)
        x = x.view(x.size(0), -1)
        out = self.fc(x)

        return out, out



class ResNet18_ARM___RAF(nn.Module):
    """ResNet-18 backbone with an amend (ARM) head for RAF-DB expression
    recognition.

    The four ResNet stages are kept as individual BasicBlocks
    (``blockN_1``/``blockN_2``) so attention modules (e.g. TBAM) can be
    spliced between them during experiments.  After the backbone, the
    (B, 512, 7, 7) feature map is rearranged by ``PixelShuffle(16)`` into
    (B, 2, 112, 112), passed through ``Amend_raf``, and classified by a
    linear layer.

    Args:
        pretrained: load pretrained weights for the ResNet-18 backbone.
        num_classes: number of expression classes.
        drop_rate: dropout probability on the ARM output; 0 disables it.
    """

    def __init__(self, pretrained=True, num_classes=7, drop_rate=0):
        super(ResNet18_ARM___RAF, self).__init__()
        self.drop_rate = drop_rate
        resnet = models.resnet18(pretrained)
        children = list(resnet.children())

        # conv1 + bn1 + relu + maxpool
        self.pre_conv = nn.Sequential(*children[0:4])

        # Each layerN of resnet18 is a Sequential of two BasicBlocks; keep
        # them separate so extra modules can be inserted between them.
        self.block1_1 = children[4][0]
        self.block1_2 = children[4][1]

        self.block2_1 = children[5][0]
        self.block2_2 = children[5][1]

        self.block3_1 = children[6][0]
        self.block3_2 = children[6][1]

        self.block4_1 = children[7][0]
        self.block4_2 = children[7][1]

        # (B, 512, 7, 7) -> (B, 2, 112, 112)
        self.arrangement = nn.PixelShuffle(16)
        self.arm = Amend_raf()
        # Amend_raf's kernel-32/stride-8 conv maps 112x112 -> 11x11, and its
        # output keeps a single channel, hence 11 * 11 = 121 features.
        self.fc = nn.Linear(121, num_classes)

    def forward(self, x):
        """Return classification logits of shape (B, num_classes)."""
        x = self.pre_conv(x)

        x = self.block1_1(x)
        x = self.block1_2(x)

        x = self.block2_1(x)
        x = self.block2_2(x)

        x = self.block3_1(x)
        x = self.block3_2(x)

        x = self.block4_1(x)
        x = self.block4_2(x)

        x = self.arrangement(x)
        x, alpha = self.arm(x)

        if self.drop_rate > 0:
            # BUGFIX: the original instantiated nn.Dropout inside forward,
            # which ignores self.training and drops activations at eval time.
            x = F.dropout(x, p=self.drop_rate, training=self.training)

        x = x.view(x.size(0), -1)
        out = self.fc(x)

        return out

class Amend_raf(nn.Module):  # moren
    """Amend module: applies one shared single-channel conv to every input
    channel, batch-normalizes the result, then blends the per-sample channel
    mean with a learnable multiple of the global (batch+channel) mean.

    Args:
        inplace: channel count expected by the BatchNorm; must match the
            channel dimension of the inputs given to ``forward``.
    """

    def __init__(self, inplace=2):
        super(Amend_raf, self).__init__()
        # One 1->1 conv shared across all input channels.
        self.de_albino = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=32, stride=8, padding=0, bias=False)
        self.bn = nn.BatchNorm2d(inplace)
        # Learnable weight for the global-mean term.
        self.alpha = nn.Parameter(torch.tensor([1.0]))

    def forward(self, x):
        b, c, h, w = x.shape
        # BUGFIX: the original looped over channels via a locals() hack and
        # concatenated into a tensor created with .cuda(), which crashed on
        # CPU inputs.  Folding channels into the batch dimension applies the
        # same shared conv to every channel in one call, on x's own device.
        mask = self.de_albino(x.reshape(b * c, 1, h, w))
        mask = mask.reshape(b, c, mask.size(-2), mask.size(-1))

        x = self.bn(mask)
        # Mean over batch AND channels -> a single (H', W') map.
        global_mean = x.mean(dim=[0, 1])
        # Per-sample mean over channels, keeping a singleton channel dim.
        xmean = torch.mean(x, 1, keepdim=True)
        # (The original also computed per-pixel channel max/min but never
        # used them; those dead reductions were removed.)
        x = xmean + self.alpha * global_mean

        return x, self.alpha





# if __name__=='__main__':
#     model = ResNet18_ARM___RAF()
#     model.cuda()
#     input = torch.randn(1, 3, 224, 224)
#     out, alpha = model(input.cuda())
#     print(out.size())
