DanielHesslow committed on
Commit b5aa510
1 Parent(s): ca09939
Files changed (1)
  1. rita_modeling.py +11 -10
rita_modeling.py CHANGED
@@ -226,19 +226,20 @@ class RITAModel(PreTrainedModel):
     def forward(
         self,
         input_ids=None,
-        past_key_values=None,
+        past_key_values=None, # NOT USED
         attention_mask=None,
-        token_type_ids=None,
-        position_ids=None,
-        head_mask=None,
+        token_type_ids=None, # NOT USED
+        position_ids=None, # NOT USED
+        head_mask=None, # NOT USED
         inputs_embeds=None,
-        encoder_hidden_states=None,
-        encoder_attention_mask=None,
+        encoder_hidden_states=None, # NOT USED
+        encoder_attention_mask=None, # NOT USED
         labels=None,
-        use_cache=None,
-        output_attentions=None,
-        output_hidden_states=None,
-        return_dict=None) -> torch.FloatTensor:
+        use_cache=None, # NOT USED
+        output_attentions=None, # NOT USED
+        output_hidden_states=None, # NOT USED
+        return_dict=None # NOT USED
+    ) -> torch.FloatTensor:
 
         if inputs_embeds == None:
             x = self.embedding(input_ids) # N x L x D
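
The commit keeps the standard Hugging Face keyword arguments in the signature but flags the ones RITAModel ignores. Below is a minimal, self-contained sketch of that pattern using a hypothetical stand-in module (not the actual RITA implementation): generic HF-style calling code can still pass the usual keyword arguments, while the body of `forward` only consumes `input_ids` / `inputs_embeds`.

```python
import torch
import torch.nn as nn

class TinyDecoder(nn.Module):  # hypothetical stand-in for RITAModel
    def __init__(self, vocab_size=32, d_model=16):
        super().__init__()
        self.embedding = nn.Embedding(vocab_size, d_model)
        self.lm_head = nn.Linear(d_model, vocab_size)

    def forward(
        self,
        input_ids=None,
        past_key_values=None,      # NOT USED
        attention_mask=None,
        token_type_ids=None,       # NOT USED
        position_ids=None,         # NOT USED
        head_mask=None,            # NOT USED
        inputs_embeds=None,
        labels=None,
        use_cache=None,            # NOT USED
        output_attentions=None,    # NOT USED
        output_hidden_states=None, # NOT USED
        return_dict=None,          # NOT USED
    ) -> torch.FloatTensor:
        if inputs_embeds is None:
            x = self.embedding(input_ids)  # N x L x D
        else:
            x = inputs_embeds
        return self.lm_head(x)  # N x L x vocab_size

model = TinyDecoder()
tokens = torch.randint(0, 32, (2, 5))
# Extra HF-style kwargs are accepted but ignored by the body of forward.
logits = model(input_ids=tokens, token_type_ids=torch.zeros_like(tokens), use_cache=False)
print(logits.shape)  # torch.Size([2, 5, 32])
```

Keeping the unused parameters (rather than deleting them) means callers such as generic generation or training loops that pass these standard arguments do not raise `TypeError`, while the `# NOT USED` comments make explicit that they have no effect.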