alex2awesome committed
Commit b206b13 · Parent: ab686d6
Update config.json
Files changed: config.json (+1, -1)
config.json CHANGED
@@ -29,7 +29,7 @@
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-05,
   "max_num_sentences": 90,
-  "max_token_length_per_sentence":
+  "max_token_length_per_sentence": 120,
   "max_position_embeddings": 514,
   "model_type": "roberta",
   "num_attention_heads": 12,
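A minimal sketch of reading the updated field back, assuming the edited config.json has been downloaded locally (the file path here is a placeholder, not part of the commit):

import json

# Load the model configuration edited by this commit.
with open("config.json") as f:
    config = json.load(f)

# The commit sets max_token_length_per_sentence to 120; max_num_sentences (90)
# is unchanged context from the same hunk.
print(config["max_num_sentences"], config["max_token_length_per_sentence"])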