import torch
import torch.nn as nn
import math
from torch.autograd import Variable
import matplotlib.pyplot as plt
import numpy as np

class Embeddings(nn.Module):
    """Token embedding lookup scaled by sqrt(embedding_size).

    The scaling follows the Transformer convention ("Attention Is All You
    Need"): embedding outputs are multiplied by sqrt(d_model) so that they
    are on a comparable magnitude to the positional encodings added later.
    """

    def __init__(self, vocab_size, embedding_size):
        super(Embeddings, self).__init__()
        self.vocab_size = vocab_size
        self.embedding_size = embedding_size
        # Lookup table mapping token ids -> dense vectors.
        self.lut = nn.Embedding(vocab_size, embedding_size)

    def forward(self, x):
        # x: integer token ids, any shape; output gains a trailing
        # embedding_size dimension.
        scale = math.sqrt(self.embedding_size)
        return self.lut(x) * scale

class PositionalEncoding(nn.Module):
    """Add fixed sinusoidal positional encodings, then apply dropout.

    Encodings follow "Attention Is All You Need":
        pe[pos, 2i]   = sin(pos / 10000^(2i/d_model))
        pe[pos, 2i+1] = cos(pos / 10000^(2i/d_model))

    Args:
        d_model: embedding dimension (assumed even — the sin/cos column
            interleave below requires it).
        dropout: dropout probability applied after adding the encodings.
        max_len: maximum supported sequence length.
    """

    def __init__(self, d_model, dropout=0.1, max_len=5000):
        super(PositionalEncoding, self).__init__()
        # BUG FIX: the original stored the raw float and never applied
        # dropout; wrap it in an nn.Dropout layer used in forward().
        self.dropout = nn.Dropout(p=dropout)

        pe = torch.zeros(max_len, d_model)
        position = torch.arange(0, max_len).unsqueeze(1)          # (max_len, 1)
        # 10000^(-2i/d_model) for the even feature indices, computed in
        # log-space for numerical stability.
        div_term = torch.exp(torch.arange(0, d_model, 2) * -(math.log(10000.0) / d_model))
        angles = position * div_term                               # (max_len, d_model/2) via broadcasting
        pe[:, 0::2] = torch.sin(angles)
        pe[:, 1::2] = torch.cos(angles)

        pe = pe.unsqueeze(0)                                       # (1, max_len, d_model) for batch broadcast
        # Buffer: moves with .to(device)/state_dict, never trained.
        self.register_buffer('pe', pe)

    def forward(self, x):
        """Add positional encodings to x of shape (batch, seq_len, d_model)."""
        # The deprecated Variable wrapper is gone: buffers do not require
        # grad, so plain slicing is sufficient.
        x = x + self.pe[:, :x.size(1)]
        return self.dropout(x)


## test module
def test_PositionalEncoding():
    """Smoke test: embed a token batch, add positional encodings, print the delta."""
    token_ids = torch.tensor([[100, 2, 421, 508], [491, 998, 1, 221]])
    embedder = Embeddings(1000, 512)
    print('myembeddings-->', embedder)
    embedded = embedder(token_ids)
    print('embed_res-->', embedded.shape, embedded)
    pos_encoder = PositionalEncoding(d_model=512, dropout=0.1, max_len=60)
    print('mypositionalencoding-->', pos_encoder)
    encoded = pos_encoder(embedded)
    print('afteraddpositionalencoding-->', encoded.shape, encoded)
    delta = encoded - embedded
    print('after-minis', delta)


def test_Embeddings():
    """Smoke test: run a small batch of token ids through the embedding layer."""
    token_ids = torch.tensor([[100, 2, 421, 508], [491, 998, 1, 221]])
    embedder = Embeddings(1000, 512)
    print('myembeddings-->', embedder)
    embedded = embedder(token_ids)
    print('embed_res-->', embedded.shape, embedded)

if __name__ == '__main__':
    # Script entry point: run the positional-encoding smoke test.
    test_PositionalEncoding()
