import torch
import torch.nn as nn
import torch.nn.functional as F
import math
import numpy as np
from torch.autograd import Variable


class Embeddings(nn.Module):
    """Token-embedding lookup table: a thin wrapper around ``nn.Embedding``."""

    def __init__(self, max_len, dim):
        """
        Args:
            max_len: number of distinct token ids (rows of the embedding table).
            dim: size of each embedding vector.
        """
        super().__init__()
        self.embed = nn.Embedding(max_len, dim)

    def forward(self, x):
        """Map a tensor of integer token ids to embedding vectors.

        Args:
            x: LongTensor of token ids, any shape.
        Returns:
            Float tensor of shape ``x.shape + (dim,)``.
        """
        embedded = self.embed(x)
        return embedded

class PositionEmbeddings(nn.Module):
    """Fixed sinusoidal positional encoding (Vaswani et al., "Attention Is
    All You Need").

    Precomputes a ``(1, max_len, dim)`` table where
        pe[0, pos, 2i]   = sin(pos / 10000^(2i/dim))
        pe[0, pos, 2i+1] = cos(pos / 10000^(2i/dim))
    and, in ``forward``, adds the first ``seq_len`` rows to the input
    embeddings before applying dropout.

    Fixes over the previous version: ``forward`` now returns a value and uses
    ``x``; the frequency term is exponentiated, scaled by ``dim``, and
    multiplied by the position (previously every row of ``pe`` was identical);
    the standard constant 10000.0 is used; ``pe`` is registered as a buffer so
    it follows ``.to(device)`` without being a learnable parameter.
    """

    def __init__(self, max_len, dim, dropout=0.1):
        """
        Args:
            max_len: maximum sequence length supported.
            dim: embedding dimension (must match the token embeddings).
            dropout: dropout probability applied after adding the encoding.
        """
        super(PositionEmbeddings, self).__init__()
        self.dim = dim
        self.dropout = nn.Dropout(dropout)

        pe = torch.zeros(max_len, dim)                                     # (max_len, dim)
        position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)  # (max_len, 1)
        # 1 / 10000^(2i/dim), computed in log space for numerical stability.
        div_term = torch.exp(torch.arange(0, dim, 2).float() * -(math.log(10000.0) / dim))
        pe[:, 0::2] = torch.sin(position * div_term)
        pe[:, 1::2] = torch.cos(position * div_term)
        # Buffer (not Parameter): saved with state_dict, moved by .to(), not trained.
        self.register_buffer('pe', pe.unsqueeze(0))  # (1, max_len, dim)

    def forward(self, x):
        """Add positional encodings to ``x``.

        Args:
            x: tensor of shape (batch, seq_len, dim) with seq_len <= max_len.
        Returns:
            Tensor of the same shape, with positional information added and
            dropout applied.
        """
        x = x + self.pe[:, :x.size(1)]
        return self.dropout(x)





if __name__ == '__main__':
    # Smoke test: build a token-embedding layer and push a random batch through it.
    embed = Embeddings(100, 512)
    for name, param in embed.named_parameters():
        print(name, param.shape)
    # Batch of 5 sequences, 10 token ids each, ids drawn from [0, 100).
    # Renamed from `input`, which shadowed the builtin of the same name.
    token_ids = torch.randint(0, 100, size=(5, 10))
    output = embed(token_ids)
    print(output.size())  # expected: torch.Size([5, 10, 512])