danitamayo committed
Commit 25e585f
1 parent: 4fb5a8c

Upload config

Files changed (1): config.json (+2 −2)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_name_or_path": "papluca/xlm-roberta-base-language-detection",
   "architectures": [
-    "RobertaForSequenceClassification"
+    "XLMRobertaForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -488,7 +488,7 @@
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
-  "model_type": "roberta",
+  "model_type": "xlm-roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "output_past": true,