davda54 committed
Commit
619ace9
Parent: fb5127b
Files changed (1)
  1. modeling_deberta.py +1 -27
modeling_deberta.py CHANGED
@@ -35,7 +35,7 @@ from transformers.modeling_outputs import (
 )
 from transformers.modeling_utils import PreTrainedModel
 from transformers.pytorch_utils import softmax_backward_data
-from transformers.utils import add_code_sample_docstrings, add_start_docstrings, add_start_docstrings_to_model_forward, logging
+from transformers.utils import add_code_sample_docstrings, add_start_docstrings, logging
 from .configuration_deberta import DebertaV2Config
 
 
@@ -1418,12 +1418,6 @@ class DebertaV2ForSequenceClassification(DebertaV2PreTrainedModel):
     def set_input_embeddings(self, new_embeddings):
         self.deberta.set_input_embeddings(new_embeddings)
 
-    @add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
-    @add_code_sample_docstrings(
-        checkpoint=_CHECKPOINT_FOR_DOC,
-        output_type=SequenceClassifierOutput,
-        config_class=_CONFIG_FOR_DOC,
-    )
     # Copied from transformers.models.deberta.modeling_deberta.DebertaForSequenceClassification.forward with Deberta->DebertaV2
     def forward(
         self,
@@ -1517,12 +1511,6 @@ class DebertaV2ForTokenClassification(DebertaV2PreTrainedModel):
         # Initialize weights and apply final processing
         self.post_init()
 
-    @add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
-    @add_code_sample_docstrings(
-        checkpoint=_CHECKPOINT_FOR_DOC,
-        output_type=TokenClassifierOutput,
-        config_class=_CONFIG_FOR_DOC,
-    )
     def forward(
         self,
         input_ids: Optional[torch.Tensor] = None,
@@ -1582,14 +1570,6 @@ class DebertaV2ForQuestionAnswering(DebertaV2PreTrainedModel):
         # Initialize weights and apply final processing
         self.post_init()
 
-    @add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
-    @add_code_sample_docstrings(
-        checkpoint=_CHECKPOINT_FOR_DOC,
-        output_type=QuestionAnsweringModelOutput,
-        config_class=_CONFIG_FOR_DOC,
-        qa_target_start_index=_QA_TARGET_START_INDEX,
-        qa_target_end_index=_QA_TARGET_END_INDEX,
-    )
     # Copied from transformers.models.deberta.modeling_deberta.DebertaForQuestionAnswering.forward with Deberta->DebertaV2
     def forward(
         self,
@@ -1688,12 +1668,6 @@ class DebertaV2ForMultipleChoice(DebertaV2PreTrainedModel):
     def set_input_embeddings(self, new_embeddings):
         self.deberta.set_input_embeddings(new_embeddings)
 
-    @add_start_docstrings_to_model_forward(DEBERTA_INPUTS_DOCSTRING.format("batch_size, sequence_length"))
-    @add_code_sample_docstrings(
-        checkpoint=_CHECKPOINT_FOR_DOC,
-        output_type=MultipleChoiceModelOutput,
-        config_class=_CONFIG_FOR_DOC,
-    )
     def forward(
         self,
         input_ids: Optional[torch.Tensor] = None,
 
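A hedged sanity check that the change is behavior-preserving: both removed decorators, add_start_docstrings_to_model_forward and add_code_sample_docstrings, rewrite the decorated method's __doc__ and nothing else, so forward-pass outputs are untouched. The upstream transformers class, which keeps its documentation decorators, makes this visible (illustrative snippet, not part of this repo; requires transformers and torch):

from transformers import DebertaV2ForSequenceClassification

# The decorators' entire effect lives in __doc__; guard with `or ""` in case
# a newer transformers release generates the docstring differently.
print((DebertaV2ForSequenceClassification.forward.__doc__ or "")[:300])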