import torch
from torch.nn.modules.lazy import LazyModuleMixin


class RMSNorm(LazyModuleMixin, torch.nn.Module):
    """Root-mean-square normalization with a learnable per-element gain.

    The gain (``weight``) is lazily materialized on the first forward pass,
    taking its size along ``dim`` — and its dtype and device — from the input.
    Its shape is all-ones except along ``dim`` so it broadcasts against inputs
    of any rank.
    """

    def __init__(self, dim: int = -1, *, eps: float = 1e-6):
        """
        Args:
            dim: Input dimension to normalize over (default: last).
            eps: Constant added to the mean square for numerical stability.
        """
        super().__init__()
        self.dim = dim
        self.eps = eps
        # Placeholder until the first input reveals the size along `dim`.
        self.weight = torch.nn.UninitializedParameter()

    def initialize_parameters(self, x: torch.Tensor):
        """Materialize ``weight`` as ones, shaped to broadcast against ``x``.

        Follows the LazyModuleMixin protocol: check
        ``has_uninitialized_params()``, then materialize the existing
        ``UninitializedParameter`` in place under ``torch.no_grad()``.
        (The original rebound a fresh ``Parameter``, skipping the guard and
        the no-grad context and discarding the registered parameter object.)
        """
        if self.has_uninitialized_params():
            with torch.no_grad():
                weight_shape = [1] * x.dim()
                weight_shape[self.dim] = x.shape[self.dim]
                self.weight.materialize(
                    weight_shape, device=x.device, dtype=x.dtype
                )
                torch.nn.init.ones_(self.weight)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        """Normalize ``x`` by its RMS along ``self.dim`` and apply the gain."""
        return rms_norm(x, dim=self.dim, eps=self.eps) * self.weight


def rms_norm(
    x: torch.Tensor,
    dim: int = -1, *,
    eps: torch.Tensor | float = 1e-6
) -> torch.Tensor:
    eps = torch.asarray(eps, dtype=x.dtype, device=x.device)
    x_ms = torch.mean(torch.square(x), dim=dim, keepdim=True)
    x = x * torch.rsqrt(x_ms + eps)
    return x
