import paddle as pp
from paddle import nn


class DebertaLayerNorm(nn.Layer):
    """LayerNorm module in the TF style (epsilon inside the square root).

    Statistics are computed in float32 for numerical stability, then the
    result is cast back to the input dtype before the learnable affine
    transform is applied.
    """

    def __init__(self, size, eps=1e-12):
        """
        Args:
            size (int): Length of the last (feature) dimension that is
                normalized.
            eps (float): Small constant added to the variance inside the
                square root to avoid division by zero.
        """
        super().__init__()
        weight_init = pp.ones((size, ))
        bias_init = pp.zeros((size, ))

        # Learnable affine parameters (gamma / beta), initialized to
        # ones and zeros respectively.
        self.weight = pp.create_parameter(
            weight_init.shape, weight_init.dtype,
            default_initializer=nn.initializer.Assign(weight_init))
        self.bias = pp.create_parameter(
            bias_init.shape, bias_init.dtype, is_bias=True,
            default_initializer=nn.initializer.Assign(bias_init))
        self.variance_epsilon = eps

    def forward(self, hidden_states):
        """Normalize ``hidden_states`` over its last dimension.

        Args:
            hidden_states (Tensor): Input of shape ``(..., size)``.

        Returns:
            Tensor: Normalized tensor with the same shape and dtype as
            the input.
        """
        input_type = hidden_states.dtype
        # Compute statistics in float32 so float16/bfloat16 inputs do not
        # overflow or lose precision in the mean/variance reductions.
        hidden_states = hidden_states.astype(pp.float32)
        mean = hidden_states.mean(-1, keepdim=True)
        variance = (hidden_states - mean).pow(2).mean(-1, keepdim=True)
        # Epsilon goes inside the square root (TF-style LayerNorm).
        hidden_states = (hidden_states - mean) / pp.sqrt(
            variance + self.variance_epsilon)
        hidden_states = hidden_states.astype(input_type)
        return self.weight * hidden_states + self.bias
