
import torch
import torch.nn as nn
from transformers import BertModel, BertTokenizer


class bert:
    """Bundles the pretrained BERT components used by the soft-masked pipeline.

    Exposes the embedding layer, the transformer encoder, the tokenizer,
    the embedded [MASK] token, and the vocabulary size.
    """

    def __init__(self, config):
        self.config = config

        # Load the pretrained model once, then pull out the sub-modules we need.
        self.bert = BertModel.from_pretrained(self.config.bert_path)
        self.embedding = self.bert.embeddings      # BertEmbeddings sub-module
        self.bert_encoder = self.bert.encoder      # transformer encoder stack

        # Tokenizer built from the project's vocab file.
        self.tokenizer = BertTokenizer.from_pretrained(self.config.vocab_file)

        # Embedding of a single [MASK] token (input shape (1, 1), so the
        # output is (1, 1, hidden)); used as the soft-mask target.
        mask_ids = torch.tensor([[self.tokenizer.mask_token_id]], dtype=torch.long)
        self.masked_e = self.embedding(mask_ids)

        self.vocab_size = self.tokenizer.vocab_size


class biGruDetector(nn.Module):
    """Detection network of Soft-Masked BERT.

    A bidirectional GRU that predicts, for each token position, the
    probability that the token is erroneous.
    """

    def __init__(self, input_size, hidden_size, num_layer=1):
        """
        Args:
            input_size: dimensionality of each input token embedding.
            hidden_size: hidden size of each GRU direction.
            num_layer: number of stacked GRU layers (default 1).
        """
        super(biGruDetector, self).__init__()
        self.rnn = nn.GRU(input_size, hidden_size, num_layers=num_layer,
                          bidirectional=True, batch_first=True)
        # Bidirectional => forward/backward states are concatenated, hence 2x.
        self.linear = nn.Linear(hidden_size * 2, 1)

    def forward(self, inp):
        """Map (batch, seq, input_size) -> (batch, seq, 1) error probabilities.

        Uses torch.sigmoid directly instead of instantiating a fresh
        nn.Sigmoid module on every call (the original did nn.Sigmoid()(...)).
        """
        rnn_output, _ = self.rnn(inp)
        return torch.sigmoid(self.linear(rnn_output))


class softMaskedBert(nn.Module):
    """Correction network of Soft-Masked BERT.

    Blends each token embedding with the [MASK] embedding according to the
    detector's error probability, runs the mixture through the BERT encoder,
    adds a residual connection back to the raw embeddings, and projects onto
    the vocabulary as log-probabilities.
    """

    def __init__(self, config, **kwargs):
        """
        Args:
            config: parameter manager; must provide ``embedding_size``.
            **kwargs: requires ``vocab_size``, ``masked_e`` (the embedded
                [MASK] token) and ``bert_encoder`` (the BERT encoder stack).
        """
        super(softMaskedBert, self).__init__()
        self.config = config
        self.vocab_size = kwargs['vocab_size']
        self.masked_e = kwargs['masked_e']
        self.bert_encoder = kwargs['bert_encoder']

        # Project encoder states onto the vocabulary; log-softmax yields
        # log-probabilities (convenient for NLLLoss).
        self.linear = nn.Linear(self.config.embedding_size, self.vocab_size)
        self.log_softmax = nn.LogSoftmax(dim=-1)

    def forward(self, bert_embedding, p, input_mask=None):
        """
        Args:
            bert_embedding: token embeddings, (batch, seq, embedding_size).
            p: detector error probabilities used as mixing weights.
            input_mask: optional mask forwarded verbatim to the encoder.
                NOTE(review): some transformers versions expect an extended
                (broadcastable) mask here rather than the raw 2-D one — verify
                against the installed version.

        Returns:
            Log-probabilities of shape (batch, seq, vocab_size).
        """
        # Soft masking: weight the [MASK] embedding by the error probability.
        soft_embedding = p * self.masked_e + (1 - p) * bert_embedding
        encoded = self.bert_encoder(hidden_states=soft_embedding,
                                    attention_mask=input_mask)
        # Residual connection back to the raw embeddings.
        residual = encoded[0] + bert_embedding
        return self.log_softmax(self.linear(residual))

