Morgan Funtowicz committed on
Commit
b7f23e0
1 Parent(s): 40dd217

Change activation to use ReLU

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -6,7 +6,7 @@
6
  "attention_probs_dropout_prob": 0.1,
7
  "finetuning_task": "sst2",
8
  "gradient_checkpointing": false,
9
- "hidden_act": "gelu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "initializer_range": 0.02,
 
6
  "attention_probs_dropout_prob": 0.1,
7
  "finetuning_task": "sst2",
8
  "gradient_checkpointing": false,
9
+ "hidden_act": "relu",
10
  "hidden_dropout_prob": 0.1,
11
  "hidden_size": 768,
12
  "initializer_range": 0.02,