import math
import paddle
import paddle.nn as nn

from .base import BaseHead
from ..registry import HEADS
from ..weight_init import weight_init_


@HEADS.register()
class CTRGCNHead(BaseHead):
    """
    Head for CTR-GCN model.

    Args:
        in_channels: int, input feature channels per branch. Default: 64.
        num_classes: int, output the number of classes. Default: 10.
        drop_out: float, dropout ratio applied before the classifier.
            Default: 0 (dropout disabled).
    """

    def __init__(self, in_channels=64, num_classes=10, drop_out=0, **kwargs):
        super().__init__(num_classes, in_channels, **kwargs)
        self.in_channels = in_channels

        # Classifier over the pooled features; the backbone feeds
        # in_channels * 4 channels into this head.
        self.fc = nn.Linear(self.in_channels * 4, self.num_classes)

        # Use nn.Identity instead of a bare lambda for the no-op case so the
        # head stays picklable and the branch is a proper sublayer. Note the
        # attribute holds the layer, not the ratio.
        if drop_out:
            self.drop_out = nn.Dropout(drop_out)
        else:
            self.drop_out = nn.Identity()

    def init_weights(self):
        """Initiate the parameters.

        The head's only parametric sublayer is ``nn.Linear`` (``self.fc``),
        so the original Conv2D-only check never matched and left ``fc`` at
        the framework default. Match the reference CTR-GCN implementation,
        which draws fc weights from N(0, sqrt(2 / num_classes)); the Conv2D
        branch is kept for any convolutional sublayers added later.
        """
        for layer in self.sublayers():
            if isinstance(layer, (nn.Conv2D, nn.Linear)):
                weight_init_(layer.weight,
                             'Normal',
                             mean=0.0,
                             std=math.sqrt(2. / self.num_classes))

    def forward(self, x):
        """Define how the head is going to run.

        Args:
            x: pooled feature tensor; presumably shape (N*M, in_channels*4)
               flattened from (N*M, C, 1, 1) — TODO confirm against the
               backbone output.

        Returns:
            Classification logits of shape (N*M, num_classes).
        """
        x = self.drop_out(x)
        return self.fc(x)

