import torch
import torch.nn as nn
from models.embedding.word_embedding import WordEmbedding


class CharWordEmbedding(nn.Module):
    """Sum character-level and word-level embeddings, optionally add learned
    positional embeddings, then apply LayerNorm and dropout.

    Both character ids and word ids are looked up through the same frozen
    ``WordEmbedding`` table and summed element-wise.
    """

    def __init__(self,
                 pretrained_path_or_vocab_size: str,
                 emb_dim: int,
                 pad_idx: int = 0,
                 use_pe: bool = True,
                 max_len: int = 100,
                 vectors=None,
                 cached_wv_path=None,
                 cached_vocab_path=None,
                 device=torch.device("cuda:0"),
                 dropout: float = 0.5):
        """
        Args:
            pretrained_path_or_vocab_size: forwarded to ``WordEmbedding``
                (path to pretrained vectors, or a vocabulary size).
            emb_dim: embedding dimension.
            pad_idx: padding index forwarded to ``WordEmbedding``.
            use_pe: if True, add a learned positional embedding.
            max_len: maximum sequence length supported by the positional
                embedding table.
            vectors, cached_wv_path, cached_vocab_path: forwarded to
                ``WordEmbedding``.
            device: device the module is moved to after construction.
            dropout: dropout probability applied to the final output
                (default 0.5, matching the previous hard-coded value).
        """
        super(CharWordEmbedding, self).__init__()
        # Frozen embedding table shared by both char-id and word-id lookups.
        self.word_embedding = WordEmbedding(pretrained_path_or_vocab_size,
                                            emb_dim, pad_idx, vectors,
                                            cached_wv_path, cached_vocab_path,
                                            freeze=True)
        self.layerNorm = nn.LayerNorm(emb_dim)
        self.dropout = nn.Dropout(dropout)
        self.use_pe = use_pe
        # `pe` is a sub-module (nn.Embedding), not a parameter, so plain
        # attribute assignment of None is the consistent "disabled" state
        # (the original mislabeled it via register_parameter).
        self.pe = nn.Embedding(max_len, emb_dim) if use_pe else None
        self.to(device)

    def forward(self, sentences, tokens, masks):
        """Embed a batch of id sequences.

        Args:
            sentences: LongTensor of character ids, shape (batch, seq_len).
            tokens: LongTensor of word ids, same shape as ``sentences``.
            masks: forwarded unchanged to ``WordEmbedding``.

        Returns:
            Tensor of shape (batch, seq_len, emb_dim): normalized, dropped-out
            sum of char, word and (optionally) positional embeddings.
        """
        _, seq_len = sentences.size()
        char_emb = self.word_embedding(sentences, masks)
        word_emb = self.word_embedding(tokens, masks)
        emb = char_emb + word_emb
        if self.use_pe:
            # Build integer position ids directly on the input's device
            # (the original used a float step, a CPU tensor and an extra
            # .to(device) copy).
            # NOTE(review): assumes seq_len <= max_len given to __init__;
            # longer inputs make the embedding lookup raise — confirm callers
            # truncate upstream.
            position_idx = torch.arange(seq_len, dtype=torch.long,
                                        device=sentences.device)
            position_idx = position_idx.unsqueeze(0).expand_as(sentences)
            emb = emb + self.pe(position_idx)
        out = self.layerNorm(emb)
        return self.dropout(out)
