import torch 
from torch import nn
# Speeds up model convergence and improves training stability.
class LayerNorm(nn.Module):
    """Layer normalization over the last (feature) dimension.

    Normalizes the input to zero mean and unit standard deviation along
    the last axis, then applies a learnable affine transform
    ``gamma * x_hat + beta``.

    Args:
        dim_model: Size of the last dimension to normalize over.
        eps: Small constant added to the std for numerical stability.
    """

    def __init__(self, dim_model, eps=1e-12):
        super(LayerNorm, self).__init__()
        # Learnable per-feature scale and shift, each of shape (dim_model,).
        self.gamma = nn.Parameter(torch.ones(dim_model))
        self.beta = nn.Parameter(torch.zeros(dim_model))
        self.eps = eps

    def forward(self, x):
        """Normalize ``x`` along its last dimension.

        Args:
            x: Tensor of shape (..., dim_model).

        Returns:
            Tensor of the same shape as ``x``.
        """
        mean = x.mean(dim=-1, keepdim=True)
        # unbiased=False: layer norm is defined with the population
        # (biased) std, matching torch.nn.LayerNorm. The previous
        # unbiased=True applied Bessel's correction and diverged from
        # the standard formulation.
        std = x.std(dim=-1, unbiased=False, keepdim=True)
        output = (x - mean) / (std + self.eps)
        output = self.gamma * output + self.beta
        return output
        
