jonsaadfalcon
committed on
Commit
•
1761a9d
1
Parent(s):
90f1c7e
Update config.json
Browse files — config.json (+2, −2)
config.json
CHANGED
@@ -28,8 +28,8 @@
|
|
28 |
"intermediate_size": 3072,
|
29 |
"layer_norm_eps": 1e-12,
|
30 |
"long_conv_kernel_learning_rate": 0.001,
|
31 |
-
"long_conv_l_max":
|
32 |
-
"max_position_embeddings":
|
33 |
"model_type": "bert",
|
34 |
"monarch_mlp_nblocks": 4,
|
35 |
"num_attention_heads": 12,
|
|
|
28 |
"intermediate_size": 3072,
|
29 |
"layer_norm_eps": 1e-12,
|
30 |
"long_conv_kernel_learning_rate": 0.001,
|
31 |
+
"long_conv_l_max": 128,
|
32 |
+
"max_position_embeddings": 128,
|
33 |
"model_type": "bert",
|
34 |
"monarch_mlp_nblocks": 4,
|
35 |
"num_attention_heads": 12,
|