import torch
from torch import nn
from torchsummary import summary


class multi_person_feature(nn.Module):
    """Depthwise-conv CNN that classifies a (in_channels, 6, seq_len) input.

    The first four stages convolve along the height axis (kernel (k, 1)) and
    assume the input height is 6, collapsing it 6 -> 6 -> 4 -> 2 -> 1; the
    last two stages convolve along the width axis (kernel (1, 3)). All convs
    are depthwise-style (``groups == in_channels``) with BatchNorm + ReLU.
    The head flattens and maps to ``classNums`` probabilities via Softmax.

    NOTE(review): the final ``nn.Softmax`` means the output is probabilities;
    if this model is trained with ``nn.CrossEntropyLoss`` (which expects raw
    logits) the Softmax should be removed — verify against the training code.

    Args:
        in_channels: number of input channels (e.g. 1).
        classNums: number of output classes.
        seq_len: input width (time axis). Only network6 changes the width
            (kernel (1, 3), no padding, so width shrinks by 2); networks 1-5
            preserve it. Hence the flattened feature count is
            ``256 * (seq_len - 2)``. Default 240 matches the sample summary
            at the bottom of this file (Flatten -> 60928 = 256 * 238).
            The previous hard-coded ``256 * 608`` only matched seq_len=610
            and crashed for the (1, 6, 240) input used in ``__main__``.
    """

    def __init__(self, in_channels, classNums, seq_len=240):
        super().__init__()
        # Height: (6 + 2*5 - 6)//2 + 1 = 6; width unchanged (kernel width 1).
        self.network1 = nn.Sequential(
            nn.Conv2d(in_channels=in_channels, out_channels=16, kernel_size=(6, 1), padding=(5, 0), stride=(2, 1),
                      groups=in_channels, bias=False),
            nn.BatchNorm2d(16),
            nn.ReLU()
        )
        # Height 6 -> 4.
        self.network2 = nn.Sequential(
            nn.Conv2d(in_channels=16, out_channels=32, kernel_size=(3, 1), groups=16, bias=False),
            nn.BatchNorm2d(32),
            nn.ReLU()
        )
        # Height 4 -> 2.
        self.network3 = nn.Sequential(
            nn.Conv2d(in_channels=32, out_channels=64, kernel_size=(3, 1), groups=32, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        # Height 2 -> 1: the height axis is fully collapsed from here on.
        self.network4 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=64, kernel_size=(2, 1), groups=64, bias=False),
            nn.BatchNorm2d(64),
            nn.ReLU()
        )
        # Width preserved (kernel (1, 3) with padding (0, 1)).
        self.network5 = nn.Sequential(
            nn.Conv2d(in_channels=64, out_channels=128, kernel_size=(1, 3), padding=(0, 1), groups=64, bias=False),
            nn.BatchNorm2d(128),
            nn.ReLU()
        )
        # Width seq_len -> seq_len - 2 (kernel (1, 3), no padding).
        self.network6 = nn.Sequential(
            nn.Conv2d(in_channels=128, out_channels=256, kernel_size=(1, 3), groups=128, bias=False),
            nn.BatchNorm2d(256),
            nn.ReLU()
        )
        # Flattened size derived from seq_len instead of a hard-coded constant,
        # so the head always agrees with the conv stack above.
        flat_features = 256 * (seq_len - 2)
        self.network7 = nn.Sequential(
            nn.Flatten(),
            nn.Linear(flat_features, classNums * 2),
            nn.Dropout(0.5),
            nn.Linear(classNums * 2, classNums),
            nn.Softmax(dim=1)
        )

        # He init for conv weights (ReLU nonlinearity); small normal for linear.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, nonlinearity='relu')
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, 0.01)

    def forward(self, x):
        """Run the six conv stages then the classifier head.

        Args:
            x: tensor of shape (batch, in_channels, 6, seq_len).

        Returns:
            Tensor of shape (batch, classNums) — per-class probabilities
            (rows sum to 1 due to the final Softmax).
        """
        out = self.network1(x)
        out = self.network2(out)
        out = self.network3(out)
        out = self.network4(out)
        out = self.network5(out)
        out = self.network6(out)
        out = self.network7(out)
        return out


if __name__ == "__main__":
    # Smoke-test: build the model on the best available device and print
    # a torchsummary layer table for a (1, 6, 240) input.
    use_cuda = torch.cuda.is_available()
    device = torch.device("cuda" if use_cuda else "cpu")
    net = multi_person_feature(1, 2).to(device)
    print(summary(net, (1, 6, 240)))

'''
Sample `torchsummary` output for multi_person_feature(1, 2) with input shape (1, 6, 240):
----------------------------------------------------------------
        Layer (type)               Output Shape         Param #
================================================================
            Conv2d-1           [-1, 16, 6, 240]              96
       BatchNorm2d-2           [-1, 16, 6, 240]              32
              ReLU-3           [-1, 16, 6, 240]               0
            Conv2d-4           [-1, 32, 4, 240]              96
       BatchNorm2d-5           [-1, 32, 4, 240]              64
              ReLU-6           [-1, 32, 4, 240]               0
            Conv2d-7           [-1, 64, 2, 240]             192
       BatchNorm2d-8           [-1, 64, 2, 240]             128
              ReLU-9           [-1, 64, 2, 240]               0
           Conv2d-10           [-1, 64, 1, 240]             128
      BatchNorm2d-11           [-1, 64, 1, 240]             128
             ReLU-12           [-1, 64, 1, 240]               0
           Conv2d-13          [-1, 128, 1, 240]             384
      BatchNorm2d-14          [-1, 128, 1, 240]             256
             ReLU-15          [-1, 128, 1, 240]               0
           Conv2d-16          [-1, 256, 1, 238]             768
      BatchNorm2d-17          [-1, 256, 1, 238]             512
             ReLU-18          [-1, 256, 1, 238]               0
          Flatten-19                [-1, 60928]               0
           Linear-20                    [-1, 4]         243,716
          Dropout-21                    [-1, 4]               0
           Linear-22                    [-1, 2]              10
          Softmax-23                    [-1, 2]               0
================================================================
Total params: 246,510
Trainable params: 246,510
Non-trainable params: 0
----------------------------------------------------------------
Input size (MB): 0.01
Forward/backward pass size (MB): 4.85
Params size (MB): 0.94
Estimated Total Size (MB): 5.79
----------------------------------------------------------------
'''
