Commit 7a68a7c by ybelkada
1 Parent(s): ee3547e

Update config.json

Files changed (1):
  1. config.json +4 -4
config.json CHANGED
@@ -24,7 +24,7 @@
   "dense_act_fn": "gelu_new",
   "diversity_penalty": 0.0,
   "do_sample": false,
-  "dropout_rate": 0.1,
+  "dropout_rate": 0.05,
   "early_stopping": false,
   "encoder_hidden_size": 768,
   "encoder_no_repeat_ngram_size": 0,
@@ -96,7 +96,7 @@
   "_name_or_path": "",
   "add_cross_attention": false,
   "architectures": null,
-  "attention_dropout": 0.0,
+  "attention_dropout": 0.05,
   "bad_words_ids": null,
   "begin_suppress_tokens": null,
   "bos_token_id": null,
@@ -108,7 +108,7 @@
   "dense_act_fn": "gelu_new",
   "diversity_penalty": 0.0,
   "do_sample": false,
-  "dropout_rate": 0.0,
+  "dropout_rate": 0.06,
   "early_stopping": false,
   "encoder_no_repeat_ngram_size": 0,
   "eos_token_id": null,
@@ -116,7 +116,7 @@
   "finetuning_task": null,
   "forced_bos_token_id": null,
   "forced_eos_token_id": null,
-  "hidden_dropout_prob": 0.0,
+  "hidden_dropout_prob": 0.05,
   "hidden_size": 768,
   "id2label": {
     "0": "LABEL_0",