import torch
from torch import nn
from torchsummary import summary
from modelSource import TAMMoudel
from modelSource import SEModule
from modelSource import InceptionV3


class multi_person_feature(nn.Module):
    """CNN feature/classification network combining an Inception stem,
    grouped convolutions, a TAM temporal-attention module and an SE
    channel-attention module, ending in a two-layer classifier head.

    Args:
        in_channels (int): number of input channels. Also reused as the
            ``groups`` count of the first grouped conv (``network1``), so
            64 must be divisible by ``in_channels`` (trivially true for the
            ``in_channels=1`` used in the __main__ smoke test below).
        classNums (int): number of output classes of the final Linear.

    NOTE(review): the classifier head hard-codes a flattened feature size
    of 512 * 608 * 1, which corresponds to a fixed input of shape
    (1, 6, 610) as used in the __main__ smoke test — other input sizes
    will fail at the first Linear. Confirm before generalizing.
    """

    def __init__(self, in_channels: int, classNums: int) -> None:
        super().__init__()
        # Project-local Inception stem (InceptionV3.InceptionC).
        # Presumably expands the 1-channel input to the 64 channels that
        # network1 expects — TODO confirm against modelSource/InceptionV3.
        self.inception = InceptionV3.InceptionC(1, 16)
        # Grouped conv over the "height" axis (kernel 6x1, stride 2 on height).
        # groups=in_channels: a plain conv when in_channels == 1.
        self.network1 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(6, 1), padding=(5, 0), stride=(2, 1),
                      groups=in_channels, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        # Three further grouped height-wise convs; each shrinks the height
        # dimension (no padding) while growing channels 64 -> 128 -> 128 -> 256.
        self.network2 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(3, 1), groups=16, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        self.network3 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=128, kernel_size=(3, 1), groups=32, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        self.network4 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=(2, 1), groups=64, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )

        # Project-local temporal attention over the 256-channel feature map.
        self.tam1 = TAMMoudel.TAM(256, 2)

        # First width-wise conv (kernel 1x3, padded so width is preserved).
        self.network5 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=(1, 3), padding=(0, 1), groups=64, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )

        # self.tam2 = TAMMoudel.TAM(128, 2)
        # Squeeze-and-Excitation over 256 channels. NOTE(review): forward()
        # multiplies its output back onto the features (out * se_out), which
        # assumes SEModule returns a gating mask rather than already-scaled
        # features — confirm against modelSource/SEModule.
        self.se = SEModule.SEModule(256)

        # Final width-wise conv (unpadded 1x3, so width shrinks by 2): 610 -> 608.
        self.network6 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=(1, 3), groups=128, bias=False),
            nn.BatchNorm2d(512),
            nn.ReLU()
        )
        # Classifier head: flatten -> hidden (classNums*2) -> dropout -> logits.
        # The 512 * 608 * 1 input size is tied to the fixed input width noted
        # in the class docstring.
        self.network7 = nn.Sequential(
            nn.Flatten(),
            nn.Linear(512 * 608 * 1, classNums * 2),
            # nn.Linear(256 * 238 * 1, classNums * 2),
            # nn.BatchNorm2d(8),
            nn.Dropout(0.5),
            nn.Linear(classNums * 2, classNums),
            # nn.Softmax(dim=1)
        )

        # Weight init: Kaiming-normal for every Conv2d (ReLU gain), N(0, 0.01)
        # for every Linear. This walks ALL submodules, so convs/linears inside
        # the project-local Inception/TAM/SE modules are re-initialized too.
        # Biases are left at their PyTorch defaults.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, nonlinearity='relu')
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Run the full pipeline; returns raw class logits (no softmax)."""
        out = self.inception(x)
        out = self.network1(out)
        out = self.network2(out)
        out = self.network3(out)
        out = self.network4(out)
        out = self.tam1(out)
        out = self.network5(out)
        # Channel attention: gate the features with the SE output.
        se_out = self.se(out)
        out = out * se_out
        out = self.network6(out)
        out = self.network7(out)
        return out


if __name__ == "__main__":
    # Smoke test: build the model on the best available device and print
    # a torchsummary layer trace for a (1, 6, 610) input.
    backend = "cuda" if torch.cuda.is_available() else "cpu"
    net = multi_person_feature(1, 2).to(torch.device(backend))
    print(summary(net, (1, 6, 610)))

'''
Sample torchsummary output. NOTE(review): this trace appears stale — it does
not match the layer definitions above (e.g. it shows a 155648-feature Flatten
feeding a 4-unit Linear with 622,596 params, whereas the current network7
declares Linear(512 * 608, classNums * 2)). Regenerate before relying on it.
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1           [-1, 16, 6, 610]              96
       BatchNorm2d-2           [-1, 16, 6, 610]              32
              ReLU-3           [-1, 16, 6, 610]               0
            Conv2d-4           [-1, 32, 4, 610]              96
       BatchNorm2d-5           [-1, 32, 4, 610]              64
              ReLU-6           [-1, 32, 4, 610]               0
            Conv2d-7           [-1, 64, 2, 610]             192
       BatchNorm2d-8           [-1, 64, 2, 610]             128
              ReLU-9           [-1, 64, 2, 610]               0
           Conv2d-10           [-1, 64, 1, 610]             128
      BatchNorm2d-11           [-1, 64, 1, 610]             128
             ReLU-12           [-1, 64, 1, 610]               0
           Linear-13                    [-1, 4]               8
      BatchNorm1d-14                    [-1, 4]               8
             ReLU-15                    [-1, 4]               0
           Linear-16                    [-1, 3]              12
          Softmax-17                    [-1, 3]               0
           Conv1d-18                [-1, 16, 2]           3,072
      BatchNorm1d-19                [-1, 16, 2]              32
             ReLU-20                [-1, 16, 2]               0
           Conv1d-21                [-1, 64, 2]           1,024
          Sigmoid-22                [-1, 64, 2]               0
              TAM-23           [-1, 64, 1, 610]               0
           Conv2d-24          [-1, 128, 1, 610]             384
      BatchNorm2d-25          [-1, 128, 1, 610]             256
             ReLU-26          [-1, 128, 1, 610]               0
AdaptiveAvgPool2d-27            [-1, 128, 1, 1]               0
           Linear-28                    [-1, 8]           1,032
             ReLU-29                    [-1, 8]               0
           Linear-30                  [-1, 128]           1,152
          Sigmoid-31                  [-1, 128]               0
         SEModule-32          [-1, 128, 1, 610]               0
           Conv2d-33          [-1, 256, 1, 608]             768
      BatchNorm2d-34          [-1, 256, 1, 608]             512
             ReLU-35          [-1, 256, 1, 608]               0
          Flatten-36               [-1, 155648]               0
           Linear-37                    [-1, 4]         622,596
          Dropout-38                    [-1, 4]               0
           Linear-39                    [-1, 2]              10
          Softmax-40                    [-1, 2]               0
================================================================
Total params: 631,730
Trainable params: 631,730
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.01
Forward/backward pass size (MB): 13.24
Params size (MB): 2.41
Estimated Total Size (MB): 15.67
----------------------------------------------------------------
'''
