Update config.json
config.json CHANGED (+1 -1)
@@ -26,7 +26,7 @@
   "intermediate_size": 18432,
   "layer_norm_epsilon": 1e-05,
   "lora_config": null,
-  "max_position_embeddings":
+  "max_position_embeddings": 2048,
   "mlp_adapter_config": {
     "hidden_act": "gelu",
     "hidden_size": 4608,
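For context, the removed line left "max_position_embeddings" with no value after the colon, which is not valid JSON, so any loader parsing config.json would fail before the setting could be read. A minimal sketch of that effect (plain json parsing assumed; the model code that consumes this config is not part of the commit):

    import json

    # Before this commit, the line
    #   "max_position_embeddings":
    # had no value, which is invalid JSON, so parsing failed.
    with open("config.json") as f:
        config = json.load(f)  # would raise json.JSONDecodeError on the old file

    # After this commit the setting parses cleanly: 2048 is the maximum
    # sequence length the position embeddings cover.
    print(config["max_position_embeddings"])  # 2048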