from transformers.models.bert.modeling_bert import BertEncoder, BertPooler, BertEmbeddings, BertForMaskedLM, MaskedLMOutput
from transformers import BertModel
from typing import List, Optional, Tuple, Union
import torch

class BertEmbeddingsV2(BertEmbeddings):
    def __init__(self, config):
        super().__init__(config)
        self.pad_token_id = config.pad_token_id
        # Re-create the position embedding table so index 0 is a dedicated padding slot:
        # padded tokens always receive position id 0 (see create_position_ids_from_input_ids),
        # so padding_idx is always 0; real tokens use positions 1..seq_len.
        self.position_embeddings = torch.nn.Embedding(
            config.max_position_embeddings, config.hidden_size, padding_idx=0
        )

    def forward(
        self,
        input_ids: torch.LongTensor,
        token_type_ids: Optional[torch.LongTensor] = None,
        position_ids: Optional[torch.LongTensor] = None,
        inputs_embeds: Optional[torch.FloatTensor] = None,
        past_key_values_length: int = 0,
    ) -> torch.Tensor:
        # Keeps the BertEmbeddings signature so BertModel can call it unchanged, but only
        # input_ids is used: position ids are always recomputed from the padding mask, and
        # token type / precomputed input embeddings are not added here.
        inputs_embeds = self.word_embeddings(input_ids)
        position_ids = self.create_position_ids_from_input_ids(input_ids)
        position_embeddings = self.position_embeddings(position_ids)
        embeddings = inputs_embeds + position_embeddings
        return self.dropout(self.LayerNorm(embeddings))
    
    def create_position_ids_from_input_ids(self, input_ids: torch.LongTensor) -> torch.Tensor:
        # Count only non-padding tokens, e.g. [[5, 6, 0, 0]] with pad_token_id=0 becomes
        # [[1, 2, 0, 0]]: real tokens get 1-based positions, padding tokens get position 0.
        mask = input_ids.ne(self.pad_token_id).int()
        return torch.cumsum(mask, dim=1).long() * mask


class BertModelV2(BertModel):
    def __init__(self, config):
        super().__init__(config)
        # Swap in the padding-aware embeddings, then re-run weight init so the
        # freshly created embedding module is initialised like the rest of the model.
        self.embeddings = BertEmbeddingsV2(config)
        self.post_init()
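

if __name__ == "__main__":
    # Minimal usage sketch (not taken from the original module): build a small,
    # randomly initialised BertModelV2 and check that padded tokens get position
    # id 0. The tiny config values below are illustrative assumptions only.
    from transformers import BertConfig

    config = BertConfig(
        vocab_size=100,
        hidden_size=32,
        num_hidden_layers=2,
        num_attention_heads=2,
        intermediate_size=64,
        max_position_embeddings=16,
        pad_token_id=0,
    )
    model = BertModelV2(config)

    # Two sequences of different lengths, right-padded with pad_token_id=0.
    input_ids = torch.tensor([[5, 6, 7, 0, 0],
                              [8, 9, 10, 11, 12]])
    attention_mask = input_ids.ne(config.pad_token_id).long()

    position_ids = model.embeddings.create_position_ids_from_input_ids(input_ids)
    print(position_ids)  # tensor([[1, 2, 3, 0, 0], [1, 2, 3, 4, 5]])

    outputs = model(input_ids=input_ids, attention_mask=attention_mask)
    print(outputs.last_hidden_state.shape)  # torch.Size([2, 5, 32])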