import torch
import torch.nn as nn
from torch.nn import Dropout
import torch.nn.functional as F
import math
from a_1_embedding import Embedding

# Pick the compute device once at import time: prefer CUDA when present.
Device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
print(Device)


class Laynormal(nn.Module):
    """Layer normalization over the last (feature) dimension.

    Normalizes each feature vector to zero mean / unit variance using the
    biased variance, then applies a learnable per-feature affine transform
    ``gama * x_hat + bias`` (equivalent in contract to ``nn.LayerNorm``).

    Args:
        d_model: size of the feature (last) dimension being normalized.
    """

    def __init__(self, d_model):
        super(Laynormal, self).__init__()
        # Small constant added to the variance for numerical stability.
        self.offset = 1e-12
        # Learnable per-feature scale (init 1) and shift (init 0). The shift
        # must start at zero so the layer is an identity transform at init;
        # the original initialized it to ones, which biases every output by +1.
        # No hard-coded device here: parameters follow the module via .to(),
        # per standard nn.Module convention.
        self.gama = nn.Parameter(torch.ones(d_model))
        self.bias = nn.Parameter(torch.zeros(d_model))

    def forward(self, x):
        # Statistics are taken over the feature dimension (dim=-1), matching
        # the (d_model,) shape of gama/bias. The original used dim=1, which
        # for a (batch, seq, d_model) input normalized across the sequence
        # axis — not layer normalization.
        x_mean = torch.mean(x, dim=-1, keepdim=True)
        x_var = torch.var(x, dim=-1, unbiased=False, keepdim=True)
        x_hat = (x - x_mean) / torch.sqrt(x_var + self.offset)
        return self.gama * x_hat + self.bias
        



if __name__ == '__main__':
    # Demo: normalize a large-magnitude random batch (5 sequences, 10 steps,
    # 512 features) and show the input and normalized output.
    sample = torch.randn(5, 10, 512) * 1200
    print(sample)
    norm_layer = Laynormal(512)
    print(norm_layer(sample))