"""
@Project    : cosmo-face
@Module     : head.py
@Author     : HuangJiWen[huangjiwen@haier.com]
@Created    : 2020/8/17 15:22
@Desc       : keypoint head
"""

import torch.nn as nn


class KeypointHead(nn.Module):
    """CenterNet-style multi-branch keypoint prediction head.

    Builds six parallel prediction branches on top of a shared backbone
    feature map, each shaped ``3x3 conv -> ReLU -> 1x1 conv``:

    - ``hm``:        object-center heatmap (1 channel)
    - ``wh``:        bounding-box width/height regression (2 channels)
    - ``hps``:       joint offsets from the object center (``2 * num_joints``)
    - ``reg``:       center sub-pixel offset (2 channels)
    - ``hm_hp``:     per-joint heatmaps (``num_joints`` channels)
    - ``hp_offset``: joint sub-pixel offset (2 channels)
    """

    def __init__(self, intermediate_channel, head_conv, num_joints):
        """
        Args:
            intermediate_channel (int): channels of the incoming feature map.
            head_conv (int): hidden channel count of each branch's 3x3 conv.
            num_joints (int): number of keypoints predicted per object.
        """
        super(KeypointHead, self).__init__()
        # All six branches share the same shape; build them with one helper
        # instead of six copy-pasted Sequentials. Attribute assignment order
        # is kept identical so submodule registration order is unchanged.
        self.hm = self._make_branch(intermediate_channel, head_conv, 1)
        self.wh = self._make_branch(intermediate_channel, head_conv, 2)
        self.hps = self._make_branch(intermediate_channel, head_conv, num_joints * 2)
        self.reg = self._make_branch(intermediate_channel, head_conv, 2)
        self.hm_hp = self._make_branch(intermediate_channel, head_conv, num_joints)
        self.hp_offset = self._make_branch(intermediate_channel, head_conv, 2)
        self.init_weights()

    @staticmethod
    def _make_branch(in_channels, hidden_channels, out_channels):
        """Return one prediction branch: 3x3 conv -> ReLU -> 1x1 conv."""
        return nn.Sequential(
            nn.Conv2d(in_channels, hidden_channels, kernel_size=3, padding=1, bias=True),
            nn.ReLU(inplace=True),
            nn.Conv2d(hidden_channels, out_channels, kernel_size=1, stride=1, padding=0))

    def init_weights(self):
        """Initialize branch parameters.

        The final bias of the two heatmap branches is filled with -2.19
        (~ -log((1 - 0.01) / 0.01)) so the initial post-sigmoid output is
        about 0.01, which stabilizes focal-loss training early on. The
        regression branches get small-normal weights and zero biases; the
        heatmap branches' weights keep PyTorch's default conv init.
        """
        self.hm[-1].bias.data.fill_(-2.19)
        self.hm_hp[-1].bias.data.fill_(-2.19)
        self.fill_fc_weights(self.wh)
        self.fill_fc_weights(self.hps)
        self.fill_fc_weights(self.reg)
        self.fill_fc_weights(self.hp_offset)

    def fill_fc_weights(self, layers):
        """Set every Conv2d in ``layers`` to N(0, 0.001) weights, zero bias."""
        for m in layers.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)

    def forward(self, x):
        """Apply all six branches to feature map ``x``.

        Args:
            x (Tensor): backbone feature map of shape
                ``(N, intermediate_channel, H, W)``.

        Returns:
            list[Tensor]: ``[hm, wh, hps, reg, hm_hp, hp_offset]``, each of
            spatial size ``(H, W)``.
        """
        return [self.hm(x), self.wh(x), self.hps(x), self.reg(x),
                self.hm_hp(x), self.hp_offset(x)]
