Markus28 committed on
Commit 65647ba
1 Parent(s): 3573e5b

fixed fill_ error, updated config

Files changed (2):
  1. configuration_bert.py +2 -0
  2. modeling_bert.py +1 -1
configuration_bert.py CHANGED
@@ -81,6 +81,7 @@ class JinaBertConfig(PretrainedConfig):
         fused_dropout_add_ln=False,
         fused_bias_fc=False,
         pad_vocab_size_multiple=1,
+        num_tasks=0,
         **kwargs,
     ):
         assert 'position_embedding_type' not in kwargs
@@ -106,3 +107,4 @@ class JinaBertConfig(PretrainedConfig):
         self.fused_dropout_add_ln = fused_dropout_add_ln
         self.fused_bias_fc = fused_bias_fc
         self.pad_vocab_size_multiple = pad_vocab_size_multiple
+        self.num_tasks = num_tasks
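For context, `num_tasks` is stored on the config like any other hyperparameter (default 0, i.e. no task types). Below is a minimal sketch of how such a field typically backs a task-type embedding table; the `TinyConfig` class and the `nn.Embedding` sizing are illustrative assumptions, not code from this repository:

import torch
import torch.nn as nn

# Hypothetical stand-in for JinaBertConfig, reduced to the fields used here.
class TinyConfig:
    def __init__(self, hidden_size=768, num_tasks=0):
        self.hidden_size = hidden_size
        self.num_tasks = num_tasks  # 0 means task types are unused

config = TinyConfig(num_tasks=5)

# Assumption: num_tasks sizes a task-type embedding table, analogous to
# token-type embeddings; zero-initialised so task ids have no effect yet.
task_type_embeddings = nn.Embedding(config.num_tasks, config.hidden_size)
nn.init.zeros_(task_type_embeddings.weight)

task_ids = torch.tensor([0, 3, 4])
print(task_type_embeddings(task_ids).abs().sum())  # tensor(0., grad_fn=<SumBackward0>)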
modeling_bert.py CHANGED
@@ -347,7 +347,7 @@ class BertModel(BertPreTrainedModel):
         # pretraining. When we start using task types during embedding training,
         # we want the model to behave exactly as in pretraining (i.e. task types
         # have no effect).
-        self.task_type_embeddings.weight.fill_(0)
+        nn.init.zeros_(self.task_type_embeddings.weight)
 
     def forward(
         self,
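Why the one-line change above fixes the reported `fill_` error: the embedding weight is a leaf Parameter with requires_grad=True, and autograd rejects in-place writes such as `.fill_(0)` on such tensors unless they run under `torch.no_grad()`. `nn.init.zeros_` performs the same zero-fill but wraps it in `torch.no_grad()`. A standalone sketch of the likely failure and the fix, using a toy `nn.Embedding` as a stand-in for `task_type_embeddings`:

import torch
import torch.nn as nn

emb = nn.Embedding(5, 8)  # stand-in for self.task_type_embeddings

# Old code path: an in-place fill_ on a leaf tensor that requires grad
# is rejected by autograd when not inside torch.no_grad().
try:
    emb.weight.fill_(0)
except RuntimeError as err:
    print(f"fill_ failed: {err}")

# New code path: nn.init.zeros_ zeroes the weight inside torch.no_grad(),
# roughly equivalent to:
#     with torch.no_grad():
#         emb.weight.zero_()
nn.init.zeros_(emb.weight)
print(emb.weight.abs().sum())  # tensor(0., grad_fn=<SumBackward0>)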