bourdoiscatie committed
Commit 037e2b8
1 Parent(s): 425c5a3

Update custom_heads_flash_t5.py

Files changed (1)
  1. custom_heads_flash_t5.py +4 -0
custom_heads_flash_t5.py CHANGED
@@ -257,6 +257,7 @@ class FlashT5ForQuestionAnswering(FlashT5PreTrainedModel):
             Labels for position (index) of the end of the labelled span for computing the token classification loss.
             Positions are clamped to the length of the sequence (*sequence_length*). Position outside of the sequence
             are not taken into account for computing the loss.
+
         Returns:
         """
         return_dict = return_dict if return_dict is not None else self.config.use_return_dict
@@ -344,9 +345,12 @@ class FlashT5ForQuestionAnswering(FlashT5PreTrainedModel):
     ) -> Union[Tuple, QuestionAnsweringModelOutput]:
         r"""
         Returns:
+
         Example:
+
         ```python
         >>> from transformers import AutoTokenizer, MTxEncoderForQuestionAnswering
+
         >>> tokenizer = AutoTokenizer.from_pretrained("MTx-small")
         >>> model = MTxEncoderForQuestionAnswering.from_pretrained("MTx-small")
         >>> input_ids = tokenizer(
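For reference, the docstring example touched by the second hunk breaks off mid-call at `tokenizer(`. Below is a minimal sketch of the usage pattern it gestures at, assuming the `MTxEncoderForQuestionAnswering` import and `"MTx-small"` checkpoint named in the docstring resolve, and that the model returns the standard `QuestionAnsweringModelOutput` fields (`start_logits`/`end_logits`) indicated by the hunk's return annotation. The question/context strings and the span-decoding step are illustrative, not part of the commit.

```python
# Sketch only: assumes the import and checkpoint named in the docstring
# exist, and that the model returns QuestionAnsweringModelOutput
# (start_logits / end_logits), per the hunk's return annotation.
import torch
from transformers import AutoTokenizer, MTxEncoderForQuestionAnswering

tokenizer = AutoTokenizer.from_pretrained("MTx-small")
model = MTxEncoderForQuestionAnswering.from_pretrained("MTx-small")

question = "What does the QA head predict?"  # illustrative input
context = "The QA head predicts start and end positions of the answer span."

# Tokenize the (question, context) pair; this is the call the
# docstring example truncates.
inputs = tokenizer(question, context, return_tensors="pt")

with torch.no_grad():
    outputs = model(**inputs)

# Greedy span selection: argmax over start/end logits, then decode
# the tokens between the two positions.
start = int(outputs.start_logits.argmax())
end = int(outputs.end_logits.argmax())
answer = tokenizer.decode(inputs["input_ids"][0, start : end + 1])
print(answer)
```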