import torch
import torch.nn as nn
from utils import load_word_vec, load_vocabs_from_wv
from typing import Optional


class WordEmbedding(nn.Module):
    """Word-embedding layer, optionally initialized from pretrained vectors.

    The first argument is overloaded:
      * ``str``  -- path to a pretrained word-vector file; vectors and the
        vocabulary size are loaded via ``load_word_vec`` /
        ``load_vocabs_from_wv`` (project helpers).
      * ``int``  -- vocabulary size; the embedding is randomly initialized
        unless ``vectors`` is passed explicitly.

    NOTE(review): when pretrained ``vectors`` are used, the embedding
    dimension comes from ``vectors`` itself and ``emb_dim`` is ignored —
    presumably callers pass a matching ``emb_dim``; confirm at call sites.
    """

    def __init__(self,
                 pretrained_path_or_vocab_size,
                 emb_dim,
                 pad_idx=0,
                 vectors=None,
                 cached_wv_path=None,
                 cached_vocab_path=None,
                 freeze=False):
        """Build the embedding table.

        Args:
            pretrained_path_or_vocab_size: path (str) to pretrained vectors,
                or vocabulary size (int).
            emb_dim: embedding dimension for the randomly-initialized case.
            pad_idx: index whose embedding is the padding entry.
            vectors: optional pre-loaded weight tensor; overridden when a
                path is given.
            cached_wv_path: cache location forwarded to ``load_word_vec``.
            cached_vocab_path: cache location forwarded to the loaders.
            freeze: if True, pretrained weights are not updated by training.

        Raises:
            TypeError: if the first argument is neither str nor int.
        """
        super(WordEmbedding, self).__init__()
        if isinstance(pretrained_path_or_vocab_size, str):
            vectors = load_word_vec(pretrained_path_or_vocab_size, cached_wv_path, cached_vocab_path)
            vocabs = load_vocabs_from_wv(pretrained_path_or_vocab_size, cached_vocab_path)
            vocab_size = len(vocabs)
        elif isinstance(pretrained_path_or_vocab_size, int):
            vocab_size = pretrained_path_or_vocab_size
        else:
            raise TypeError("Expect str/int , but given %s " % type(pretrained_path_or_vocab_size))
        if vectors is not None:
            # Build directly from the pretrained weights. The original code
            # first allocated a random nn.Embedding and then discarded it by
            # calling the classmethod from_pretrained on the instance.
            self.embedding = nn.Embedding.from_pretrained(vectors, freeze=freeze, padding_idx=pad_idx)
        else:
            self.embedding = nn.Embedding(vocab_size, emb_dim, pad_idx)

    def forward(self, sentences, masks=None):
        """Look up embeddings for token-id tensor ``sentences``.

        ``masks`` is accepted for interface compatibility but unused here.
        """
        return self.embedding(sentences)
