import torch
import torch.nn as nn
from lib.bert.model import BertConfig, BertModel
from utils import load_word_vec, load_vocabs_from_wv

from models.embedding.char_embedding import  CharEmbedding
from models.embedding.word_embedding import WordEmbedding
from models.embedding.bert_embeding import BertEmbedding


class JointCharWordEmbedding(nn.Module):
    """Embedding layer that concatenates char-level and word-level embeddings.

    Each side is configured by its *path* argument:
      * ``int``  -> a plain trainable ``nn.Embedding`` of that vocabulary size;
      * ``str``  -> (char side only) a pretrained source encoded as
        ``"<kind>_<spec>"`` where ``<kind>`` is ``char``, ``word`` or ``bert``
        and ``<spec>`` is the remainder after the ``'_'`` separator.

    ``forward`` returns ``dropout(cat([char_emb, word_emb], dim=2))``.
    """

    def __init__(self, char_pretrained_path, word_vec_path, char_emb_dim, word_emb_dim, max_len, keep_prob, pad_idx, use_cuda, cached_wv_path, cached_vocab_path):
        super(JointCharWordEmbedding, self).__init__()
        self.device = torch.device("cuda:0" if use_cuda else "cpu:0")

        # --- char side -------------------------------------------------
        if isinstance(char_pretrained_path, int):
            # Plain trainable table; the int is the vocabulary size.
            self.char_embeddings = nn.Embedding(char_pretrained_path, char_emb_dim, pad_idx)
        elif isinstance(char_pretrained_path, str):
            # Path encodes "<kind>_<spec>"; kind is the first 4 chars,
            # spec is everything after the '_' separator.
            kind = char_pretrained_path[:4]
            spec = char_pretrained_path[5:]
            if kind == "char":
                self.char_embeddings = CharEmbedding(int(spec), char_emb_dim, max_len, pad_idx, self.device)
            elif kind == "word":
                self.char_embeddings = WordEmbedding(spec, char_emb_dim, pad_idx, None, cached_wv_path, cached_vocab_path, True)
            elif kind == "bert":
                self.char_embeddings = BertEmbedding(spec, 256, 256, 768, keep_prob, pad_idx, False)

        # --- word side -------------------------------------------------
        if isinstance(word_vec_path, int):
            self.word_embeddings = nn.Embedding(word_vec_path, word_emb_dim, pad_idx)
        elif isinstance(word_vec_path, str):
            self.word_embeddings = WordEmbedding(word_vec_path, word_emb_dim, pad_idx, None, cached_wv_path, cached_vocab_path, True)

        # NOTE(review): nn.Dropout's argument is the *drop* probability,
        # but this parameter is named keep_prob — confirm callers actually
        # pass a drop rate here, not a keep rate.
        self.dropout = nn.Dropout(keep_prob)
        self.to(self.device)

    def forward(self, sentences, tokens, masks=None):
        """Embed inputs on both sides and return the concatenated, dropped-out result.

        The char layer is dispatched on its concrete type, since each
        embedding class takes a different argument signature.
        """
        char_layer = self.char_embeddings
        if isinstance(char_layer, CharEmbedding):
            char_emb = char_layer(sentences, tokens, masks)
        elif isinstance(char_layer, WordEmbedding):
            char_emb = char_layer(sentences, masks)
        elif isinstance(char_layer, BertEmbedding):
            char_emb = char_layer(sentences, tokens, masks)
        else:
            # Fallback: plain nn.Embedding built from an int vocab size.
            char_emb = char_layer(sentences)

        word_emb = self.word_embeddings(tokens)
        combined = torch.cat([char_emb, word_emb], dim=2)
        return self.dropout(combined)

