mayank-mishra committed
Commit 9ec6bd4
Parent(s): afc3169
fix generation config
Files changed:
- config.json (+3 -2)
- generation_config.json (+3 -2)
config.json
CHANGED
@@ -4,8 +4,8 @@
   ],
   "attention_bias": true,
   "attention_dropout": 0.1,
-  "bos_token_id":
-  "eos_token_id":
+  "bos_token_id": 0,
+  "eos_token_id": 0,
   "hidden_act": "silu",
   "hidden_size": 2560,
   "initializer_range": 0.02,
@@ -16,6 +16,7 @@
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 32,
+  "pad_token_id": 0,
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
generation_config.json
CHANGED
@@ -1,6 +1,7 @@
 {
   "_from_model_config": true,
-  "bos_token_id":
-  "eos_token_id":
+  "bos_token_id": 0,
+  "eos_token_id": 0,
+  "pad_token_id": 0,
   "transformers_version": "4.41.0.dev0"
 }
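As a quick sanity check after this commit, a minimal sketch (assuming the transformers library; the repo id below is a placeholder, since the commit page does not name the repository) that verifies config.json and generation_config.json now agree on the special token ids:

from transformers import AutoConfig, GenerationConfig

# Placeholder repo id; replace with the actual model repository.
repo_id = "user/model"

config = AutoConfig.from_pretrained(repo_id)
gen_config = GenerationConfig.from_pretrained(repo_id)

# After this commit, both files should report the same special token ids.
assert config.bos_token_id == gen_config.bos_token_id == 0
assert config.eos_token_id == gen_config.eos_token_id == 0
assert config.pad_token_id == gen_config.pad_token_id == 0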