import torch
import torch.nn as nn
import numpy as np


class CharEmbedding(nn.Module):
    """Character/token embedding layer with additive sinusoidal positional
    encoding, followed by dropout and layer normalization.

    Output of ``forward`` is ``embedding(sentences) + PE[:seq_len]`` passed
    through ``Dropout(0.5)`` and ``LayerNorm(emb_dim)``.
    """

    def __init__(self, vocab_size, emb_dim, max_len, pad_idx=0, device=torch.device("cuda:0")):
        """
        Args:
            vocab_size: size of the embedding vocabulary.
            emb_dim: embedding dimension (also the LayerNorm normalized shape).
            max_len: maximum sequence length supported by the positional table.
            pad_idx: embedding index whose vector is kept at zero (padding).
            device: device the module and the positional table are moved to.
        """
        super(CharEmbedding, self).__init__()
        self.embeddings = nn.Embedding(vocab_size, emb_dim, pad_idx)
        # Precomputed, non-learned positional table of shape (max_len, emb_dim).
        # NOTE(review): kept as a plain attribute (not a registered buffer) to
        # preserve the original state_dict layout.
        self.pe = self.positional_encoding(max_len, emb_dim, device)
        self.dropout = nn.Dropout(0.5)
        self.layerNorm = nn.LayerNorm(emb_dim)
        self.to(device)

    @staticmethod
    def positional_encoding(max_len, emb_dim, device):
        """Build the (max_len, emb_dim) sinusoidal positional-encoding table.

        Even columns hold sin(pos * k), odd columns cos(pos * k), with
        k = exp(-ln(10000) * 2i / emb_dim) as in "Attention Is All You Need".
        """
        # BUG FIX: was torch.LongTensor(max_len, emb_dim) — writing sin/cos
        # floats into an int64 tensor truncated every entry to {-1, 0, 1},
        # destroying the positional signal. Use a float tensor instead.
        pe = torch.zeros(max_len, emb_dim)
        pos = torch.arange(0, max_len, 1.0).unsqueeze(1)
        k = torch.exp(-np.log(10000) * torch.arange(0, emb_dim, 2.) / emb_dim)
        pe[:, 0::2] = torch.sin(pos * k)
        # k has ceil(emb_dim/2) entries; the cosine (odd) columns number only
        # emb_dim // 2, so slice k to match — this also makes odd emb_dim work
        # (the original raised a shape-mismatch error for odd dims).
        pe[:, 1::2] = torch.cos(pos * k[: emb_dim // 2])
        return pe.to(device)

    def forward(self, sentences, tokens, masks=None):
        """Embed token indices and add positional encodings.

        Args:
            sentences: LongTensor of token indices, shape (..., seq_len).
            tokens: unused; kept for interface compatibility with callers.
            masks: unused; kept for interface compatibility with callers.

        Returns:
            Float tensor of shape (..., seq_len, emb_dim).
        """
        max_len = sentences.size(-1)
        emb = self.embeddings(sentences)
        emb = emb + self.pe[:max_len]
        emb = self.dropout(emb)
        emb = self.layerNorm(emb)
        return emb
