The maximum position embeddings setting causes an error when input length exceeds 512 tokens.

#3
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -13,7 +13,7 @@
13
  "initializer_range": 0.02,
14
  "intermediate_size": 3072,
15
  "layer_norm_eps": 1e-05,
16
- "max_position_embeddings": 514,
17
  "model_type": "xlm-roberta",
18
  "num_attention_heads": 12,
19
  "num_hidden_layers": 12,
 
13
  "initializer_range": 0.02,
14
  "intermediate_size": 3072,
15
  "layer_norm_eps": 1e-05,
16
+ "max_position_embeddings": 512,
17
  "model_type": "xlm-roberta",
18
  "num_attention_heads": 12,
19
  "num_hidden_layers": 12,