XiangD-OSU committed on
Commit
e53d7cf
1 Parent(s): 9f10f15

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -6
config.json CHANGED
@@ -5,9 +5,7 @@
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "bos_token_id": 0,
8
- "classifier_dropout": null,
9
  "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
  "hidden_act": "gelu",
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 768,
@@ -19,10 +17,6 @@
19
  "num_attention_heads": 12,
20
  "num_hidden_layers": 12,
21
  "pad_token_id": 1,
22
- "position_embedding_type": "absolute",
23
- "torch_dtype": "float32",
24
- "transformers_version": "4.10.2",
25
  "type_vocab_size": 1,
26
- "use_cache": true,
27
  "vocab_size": 50266
28
  }
 
5
  ],
6
  "attention_probs_dropout_prob": 0.1,
7
  "bos_token_id": 0,
 
8
  "eos_token_id": 2,
 
9
  "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
 
17
  "num_attention_heads": 12,
18
  "num_hidden_layers": 12,
19
  "pad_token_id": 1,
 
 
 
20
  "type_vocab_size": 1,
 
21
  "vocab_size": 50266
22
  }