alycialee committed
Commit 4a1d153
1 Parent(s): 725617d

fix vocab size in config

Files changed (1)
  1. config.json +2 -2
config.json CHANGED
@@ -5,6 +5,7 @@
     "BertForMaskedLM"
   ],
   "attention_probs_dropout_prob": 0.0,
+  "bidirectional": true,
   "auto_map": {
     "AutoConfig": "configuration_bert.BertConfig",
     "AutoModelForMaskedLM": "bert_layers.BertForMaskedLM"
@@ -27,7 +28,7 @@
   "transformers_version": "4.28.1",
   "type_vocab_size": 2,
   "use_cache": true,
-  "vocab_size": 30522,
+  "vocab_size": 30528,
   "long_conv_l_max": 128,
   "long_conv_kernel_learning_rate": 1e-3,
   "hyena_lr_pos_emb": 1e-5,
@@ -35,7 +36,6 @@
   "hyena_wd": 0.1,
   "hyena_emb_dim": 5,
   "hyena_filter_order": 128,
-  "bidirectional": true,
   "residual_long_conv": true,
   "use_glu_mlp": true,
   "use_monarch_mlp": true,