patrickvonplaten committed
Commit 35bc787 (1 parent: 64c03ad)
Files changed (1)
  1. config.json +1 -1
config.json CHANGED
@@ -13,7 +13,7 @@
  "intermediate_size": 10240,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
- "model_type": "xlm-roberta",
+ "model_type": "xlm-roberta-xl",
  "num_attention_heads": 32,
  "num_hidden_layers": 36,
  "pad_token_id": 1,