davda54 committed on
Commit
33d2d19
1 Parent(s): 3b2a8c4

Update modeling_ltgbert.py

Browse files
Files changed (1) hide show
  1. modeling_ltgbert.py +2 -26
modeling_ltgbert.py CHANGED
@@ -24,7 +24,7 @@ import torch.nn as nn
24
  import torch.nn.functional as F
25
  from torch.utils import checkpoint
26
 
27
- from configuration_ltgbert import LtgBertConfig
28
  from transformers.modeling_utils import PreTrainedModel
29
  from transformers.activations import gelu_new
30
  from transformers.modeling_outputs import (
@@ -39,34 +39,10 @@ from transformers.pytorch_utils import softmax_backward_data
39
  from transformers.utils import add_start_docstrings, add_start_docstrings_to_model_forward
40
 
41
 
42
- _CHECKPOINT_FOR_DOC = "ltg/bnc-bert-span"
43
  _CONFIG_FOR_DOC = "LtgBertConfig"
44
 
45
 
46
- LTG_BERT_PRETRAINED_MODEL_ARCHIVE_LIST = [
47
- "bnc-bert-span",
48
- "bnc-bert-span-2x",
49
- "bnc-bert-span-0.5x",
50
- "bnc-bert-span-0.25x",
51
- "bnc-bert-span-order",
52
- "bnc-bert-span-document",
53
- "bnc-bert-span-word",
54
- "bnc-bert-span-subword",
55
-
56
- "norbert3-xs",
57
- "norbert3-small",
58
- "norbert3-base",
59
- "norbert3-large",
60
-
61
- "norbert3-oversampled-base",
62
- "norbert3-ncc-base",
63
- "norbert3-nak-base",
64
- "norbert3-nb-base",
65
- "norbert3-wiki-base",
66
- "norbert3-c4-base"
67
- ]
68
-
69
-
70
  class Encoder(nn.Module):
71
  def __init__(self, config, activation_checkpointing=False):
72
  super().__init__()
 
24
  import torch.nn.functional as F
25
  from torch.utils import checkpoint
26
 
27
+ from .configuration_ltgbert import LtgBertConfig
28
  from transformers.modeling_utils import PreTrainedModel
29
  from transformers.activations import gelu_new
30
  from transformers.modeling_outputs import (
 
39
  from transformers.utils import add_start_docstrings, add_start_docstrings_to_model_forward
40
 
41
 
42
+ _CHECKPOINT_FOR_DOC = "ltg/ltg-bert-bnc"
43
  _CONFIG_FOR_DOC = "LtgBertConfig"
44
 
45
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
46
  class Encoder(nn.Module):
47
  def __init__(self, config, activation_checkpointing=False):
48
  super().__init__()