import torch
from torch import nn
from torchsummary import summary
from modelSource import TAMMoudel
from modelSource import SEModule
from modelSource import InceptionV3


class multi_person_feature(nn.Module):
    """Grouped-convolution classification network for multi-person feature data.

    Pipeline (shapes per the recorded torchsummary dump at the bottom of this
    file, for a (batch, 1, 6, 610) input):
    Inception front-end (1 -> 64 ch) -> network1..4 collapse the height axis
    (6 -> 6 -> 4 -> 2 -> 1) while growing channels to 256 -> SE channel gate ->
    network5 (width-preserving) -> TAM -> network6 (256 -> 512 ch, width
    612 -> 610) -> flatten -> two-layer classifier head.

    Args:
        in_channels: used only as the group count of the first grouped conv
            (the conv itself always maps 64 -> 64 channels); the smoke test
            below passes 1.
        classNums: number of output classes.

    Output: (batch, classNums) raw logits — no softmax is applied here.
    """

    def __init__(self, in_channels, classNums):
        super().__init__()
        # Inception front-end: 1 input channel, 16 channels per branch; the
        # recorded summary shows it concatenating to 64 output channels.
        self.inception = InceptionV3.InceptionC(1, 16)
        # network1..network4: grouped convs with (k, 1) kernels that reduce
        # the height axis 6 -> 6 -> 4 -> 2 -> 1 and grow channels 64 -> 256.
        self.network1 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(6, 1),
                      padding=(5, 0), stride=(2, 1), groups=in_channels, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU(),
        )
        self.network2 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(3, 1),
                      groups=16, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(),
        )
        self.network3 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=128, kernel_size=(3, 1),
                      groups=32, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU(),
        )
        self.network4 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=(2, 1),
                      groups=64, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(),
        )

        # Squeeze-and-Excitation gate over the 256 channels; forward()
        # multiplies its output elementwise with its own input.
        self.se = SEModule.SEModule(256)

        # Width-preserving grouped conv along the time (width) axis.
        self.network5 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=256, kernel_size=(1, 3),
                      padding=(0, 1), groups=64, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU(),
        )

        # Temporal attention module on the 256-channel map.
        self.tam1 = TAMMoudel.TAM(256, 1)

        # Final conv: 256 -> 512 channels; no padding, so width 612 -> 610.
        self.network6 = nn.Sequential(
            nn.Conv2d(in_channels=256, out_channels=512, kernel_size=(1, 3),
                      groups=128, bias=False),
            nn.BatchNorm2d(512),
            nn.ReLU(),
        )

        # Classifier head. network6 emits (512, 1, 610), so the flattened
        # feature size is 512 * 1 * 610 = 312320 — this matches the recorded
        # summary (Flatten-67: 312320). The previous value, 256 * 610 = 156160,
        # mismatched the flatten output and raised a shape error at runtime.
        self.network7 = nn.Sequential(
            nn.Flatten(),
            nn.Linear(512 * 1 * 610, classNums * 2),
            nn.Dropout(0.5),
            nn.Linear(classNums * 2, classNums),
        )

        # He init for convs (ReLU activations); small normal init for linears.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, nonlinearity='relu')
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)

    def forward(self, x):
        """Return (batch, classNums) logits for x of shape (batch, 1, 6, 610)."""
        out = self.inception(x)
        out = self.network1(out)
        out = self.network2(out)
        out = self.network3(out)
        out = self.network4(out)
        # Channel-attention gating: rescale features by the SE output.
        out = out * self.se(out)
        out = self.network5(out)
        out = self.tam1(out)
        out = self.network6(out)
        return self.network7(out)


if __name__ == "__main__":
    # Smoke test: build the model on the best available device and print
    # its per-layer summary for a single (1, 6, 610) input.
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    net = multi_person_feature(1, 2).to(device)
    print(summary(net, (1, 6, 610)))

'''
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1           [-1, 16, 6, 612]              16
       BatchNorm2d-2           [-1, 16, 6, 612]              32
       BasicConv2d-3           [-1, 16, 6, 612]               0
            Conv2d-4           [-1, 16, 6, 612]              16
       BatchNorm2d-5           [-1, 16, 6, 612]              32
       BasicConv2d-6           [-1, 16, 6, 612]               0
            Conv2d-7           [-1, 16, 6, 612]           1,280
       BatchNorm2d-8           [-1, 16, 6, 612]              32
       BasicConv2d-9           [-1, 16, 6, 612]               0
           Conv2d-10           [-1, 16, 6, 612]           1,280
      BatchNorm2d-11           [-1, 16, 6, 612]              32
      BasicConv2d-12           [-1, 16, 6, 612]               0
           Conv2d-13           [-1, 16, 6, 612]              16
      BatchNorm2d-14           [-1, 16, 6, 612]              32
      BasicConv2d-15           [-1, 16, 6, 612]               0
           Conv2d-16           [-1, 16, 6, 612]           1,280
      BatchNorm2d-17           [-1, 16, 6, 612]              32
      BasicConv2d-18           [-1, 16, 6, 612]               0
           Conv2d-19           [-1, 16, 6, 612]           1,280
      BatchNorm2d-20           [-1, 16, 6, 612]              32
      BasicConv2d-21           [-1, 16, 6, 612]               0
           Conv2d-22           [-1, 16, 6, 612]           1,280
      BatchNorm2d-23           [-1, 16, 6, 612]              32
      BasicConv2d-24           [-1, 16, 6, 612]               0
           Conv2d-25           [-1, 16, 6, 612]           1,280
      BatchNorm2d-26           [-1, 16, 6, 612]              32
      BasicConv2d-27           [-1, 16, 6, 612]               0
           Conv2d-28           [-1, 16, 6, 612]              16
      BatchNorm2d-29           [-1, 16, 6, 612]              32
      BasicConv2d-30           [-1, 16, 6, 612]               0
       InceptionC-31           [-1, 64, 6, 612]               0
           Conv2d-32           [-1, 64, 6, 612]          24,576
      BatchNorm2d-33           [-1, 64, 6, 612]             128
             ReLU-34           [-1, 64, 6, 612]               0
           Conv2d-35          [-1, 128, 4, 612]           1,536
      BatchNorm2d-36          [-1, 128, 4, 612]             256
             ReLU-37          [-1, 128, 4, 612]               0
           Conv2d-38          [-1, 128, 2, 612]           1,536
      BatchNorm2d-39          [-1, 128, 2, 612]             256
             ReLU-40          [-1, 128, 2, 612]               0
           Conv2d-41          [-1, 256, 1, 612]           1,024
      BatchNorm2d-42          [-1, 256, 1, 612]             512
             ReLU-43          [-1, 256, 1, 612]               0
AdaptiveAvgPool2d-44            [-1, 256, 1, 1]               0
           Linear-45                   [-1, 16]           4,112
             ReLU-46                   [-1, 16]               0
           Linear-47                  [-1, 256]           4,352
          Sigmoid-48                  [-1, 256]               0
         SEModule-49          [-1, 256, 1, 612]               0
           Conv2d-50          [-1, 256, 1, 612]           3,072
      BatchNorm2d-51          [-1, 256, 1, 612]             512
             ReLU-52          [-1, 256, 1, 612]               0
           Linear-53                    [-1, 2]               2
      BatchNorm1d-54                    [-1, 2]               4
             ReLU-55                    [-1, 2]               0
           Linear-56                    [-1, 3]               6
          Softmax-57                    [-1, 3]               0
           Conv1d-58                [-1, 64, 1]          49,152
      BatchNorm1d-59                [-1, 64, 1]             128
             ReLU-60                [-1, 64, 1]               0
           Conv1d-61               [-1, 256, 1]          16,384
          Sigmoid-62               [-1, 256, 1]               0
              TAM-63          [-1, 256, 1, 612]               0
           Conv2d-64          [-1, 512, 1, 610]           3,072
      BatchNorm2d-65          [-1, 512, 1, 610]           1,024
             ReLU-66          [-1, 512, 1, 610]               0
          Flatten-67               [-1, 312320]               0
           Linear-68                    [-1, 4]       1,249,284
          Dropout-69                    [-1, 4]               0
           Linear-70                    [-1, 2]              10
================================================================
Total params: 1,369,002
Trainable params: 1,369,002
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.01
Forward/backward pass size (MB): 50.48
Params size (MB): 5.22
Estimated Total Size (MB): 55.72
----------------------------------------------------------------
None
'''
