qilowoq committed
Commit 18e4d36
Parent: 75373b1

Upload model

Files changed (3):
  1. AbLang_roberta_model.py +3 -1
  2. config.json +2 -2
  3. pytorch_model.bin +2 -2
AbLang_roberta_model.py CHANGED
@@ -16,7 +16,8 @@ class RobertaEmbeddingsV2(RobertaEmbeddings):
         inputs_embeds: Optional[torch.FloatTensor] = None,
         past_key_values_length: int = 0,
     ) -> torch.Tensor:
-        inputs_embeds = self.word_embeddings(input_ids)
+        if inputs_embeds is None:
+            inputs_embeds = self.word_embeddings(input_ids)
         position_ids = self.create_position_ids_from_input_ids(input_ids)
         position_embeddings = self.position_embeddings(position_ids)
         embeddings = inputs_embeds + position_embeddings
@@ -37,5 +38,6 @@ class RobertaForMaskedLMV2(RobertaForMaskedLM):
     def __init__(self, config):
         super().__init__(config)
         self.roberta = RobertaModelV2(config, add_pooling_layer=False)
+        self.post_init()
 
 
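
The first hunk makes RobertaEmbeddingsV2.forward respect a caller-supplied inputs_embeds, looking up token embeddings from input_ids only when none are passed; the second adds a post_init() call so the reassigned RobertaModelV2 backbone goes through transformers' usual post-construction weight setup. A self-contained toy module (not the repo's class, just an illustration of the same guard pattern):

import torch
import torch.nn as nn


class ToyEmbeddings(nn.Module):
    """Toy illustration of the inputs_embeds guard added in this commit."""

    def __init__(self, vocab_size=24, hidden_size=8, max_positions=160):
        super().__init__()
        self.word_embeddings = nn.Embedding(vocab_size, hidden_size)
        self.position_embeddings = nn.Embedding(max_positions, hidden_size)

    def forward(self, input_ids=None, inputs_embeds=None):
        # Look up token embeddings only when the caller did not supply them,
        # so precomputed embeddings are no longer silently discarded.
        if inputs_embeds is None:
            inputs_embeds = self.word_embeddings(input_ids)
        positions = torch.arange(inputs_embeds.size(1), device=inputs_embeds.device)
        return inputs_embeds + self.position_embeddings(positions).unsqueeze(0)


emb = ToyEmbeddings()
ids = torch.randint(0, 24, (1, 10))
# With the guard, the precomputed-embeddings path matches the input_ids path.
print(torch.allclose(emb(input_ids=ids), emb(inputs_embeds=emb.word_embeddings(ids))))  # True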
 
config.json CHANGED
@@ -1,11 +1,11 @@
 {
   "add_pooling_layer": false,
   "architectures": [
-    "RobertaForMaskedLMV2"
+    "RobertaModelV2"
   ],
   "attention_probs_dropout_prob": 0.1,
   "auto_map": {
-    "AutoModelForMaskedLM": "AbLang_roberta_model.RobertaForMaskedLMV2"
+    "AutoModel": "AbLang_roberta_model.RobertaModelV2"
   },
   "bos_token_id": 0,
   "classifier_dropout": null,
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:508c7ea07c28cf327ae680d5b7d1ce72def49c8099991bbbe40997a772055dd7
-size 343306045
+oid sha256:4b1abd839c12b1f3494a4e425009182a2d93e7c8cb85fcef1c6b849b6c589772
+size 340860389
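
The LFS pointer update swaps in new binary weights. Since a Git LFS oid is the SHA-256 of the stored file, a downloaded copy can be checked against the pointer; a small verification sketch, assuming pytorch_model.bin sits in the working directory:

import hashlib
from pathlib import Path

# Assumption: pytorch_model.bin was downloaded into the current directory.
weights = Path("pytorch_model.bin")

digest = hashlib.sha256(weights.read_bytes()).hexdigest()
expected = "4b1abd839c12b1f3494a4e425009182a2d93e7c8cb85fcef1c6b849b6c589772"
print(digest == expected, weights.stat().st_size)  # expected output: True 340860389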