import logging
import oneflow as torch
import oneflow.nn as nn
import oneflow.nn.functional as F

logger = logging.getLogger(__name__)


class PositionwiseFeedForward(nn.Module):
    """Positionwise feed forward

    :param int idim: input dimension
    :param int hidden_units: number of hidden units
    :param float dropout_rate: dropout rate
    :param str activation: activation type ('relu', 'glu', 'gelu', or 'swish')
    :param bool apply_initialization: if True, re-initialize parameters with
        Xavier uniform in the constructor
    """

    def __init__(self, idim, hidden_units, dropout_rate, activation='relu', apply_initialization=False):
        super(PositionwiseFeedForward, self).__init__()
        self.activation = activation
        self.apply_initialization = apply_initialization
        # GLU halves the channel dimension via its gate, so the first
        # projection is doubled to keep the hidden size at `hidden_units`.
        self.w_1 = nn.Linear(idim, hidden_units * 2 if activation == 'glu' else hidden_units)
        self.w_2 = nn.Linear(hidden_units, idim)
        self.dropout = nn.Dropout(dropout_rate)

        if self.apply_initialization:
            self.init_parameters()

    def forward(self, x):
        """Apply the position-wise feed forward network.

        :param torch.Tensor x: input tensor of shape `(..., idim)`
        :return: output tensor of shape `(..., idim)`
        :rtype: torch.Tensor
        :raises NotImplementedError: if the configured activation is unknown
        """
        x = self.w_1(x)
        if self.activation == 'relu':
            x = F.relu(x)
        elif self.activation == 'glu':
            x = F.glu(x)
        elif self.activation == 'gelu':
            x = F.gelu(x)
        elif self.activation == 'swish':
            # swish(x) = x * sigmoid(x)
            x = x * torch.sigmoid(x)
        else:
            raise NotImplementedError('Unsupported activation: %s' % self.activation)
        return self.w_2(self.dropout(x))

    def init_parameters(self):
        """Initialize parameters with Xavier uniform distribution (biases to zero)."""
        for _, p in self.named_parameters():
            if p.dim() == 1:
                nn.init.constant_(p, 0.)  # bias
            elif p.dim() in [2, 3, 4]:
                nn.init.xavier_uniform_(p)
        # Lazy %-args: the message is only formatted when DEBUG logging is enabled.
        logger.debug('===== Initialize %s with Xavier uniform distribution =====',
                     self.__class__.__name__)

