Commit bf30185 by truongnguyenxuanvinh
1 Parent(s): 27621cb

Upload XLMRobertaForSequenceClassification

Files changed (2)
  1. config.json +5 -4
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "roberta-large",
+  "_name_or_path": "xlm-roberta-large",
   "architectures": [
-    "RobertaForSequenceClassification"
+    "XLMRobertaForSequenceClassification"
   ],
   "attention_probs_dropout_prob": 0.1,
   "bos_token_id": 0,
@@ -28,9 +28,10 @@
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,
-  "model_type": "roberta",
+  "model_type": "xlm-roberta",
   "num_attention_heads": 16,
   "num_hidden_layers": 24,
+  "output_past": true,
   "pad_token_id": 1,
   "position_embedding_type": "absolute",
   "problem_type": "multi_label_classification",
@@ -38,5 +39,5 @@
   "transformers_version": "4.44.2",
   "type_vocab_size": 1,
   "use_cache": true,
-  "vocab_size": 50265
+  "vocab_size": 250002
 }
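The updated config now describes an xlm-roberta checkpoint with problem_type set to multi_label_classification, so each label is scored independently with a sigmoid rather than a softmax over labels. Below is a minimal loading sketch, not part of the commit itself: the repository id is a placeholder (only the owner's username appears in this commit), and the tokenizer is loaded from the xlm-roberta-large base on the assumption that this repo ships only the weights and config.

import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

repo_id = "truongnguyenxuanvinh/<repo-name>"  # hypothetical id; substitute the real repo path
tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-large")  # assumes no tokenizer files in the repo
model = AutoModelForSequenceClassification.from_pretrained(repo_id)  # resolves to XLMRobertaForSequenceClassification

inputs = tokenizer("An example sentence.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# multi_label_classification: independent sigmoid per label, thresholded at 0.5
probs = torch.sigmoid(logits)
predicted_label_ids = (probs[0] > 0.5).nonzero(as_tuple=True)[0].tolist()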
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c720f359a3dd64508669bfb3aa9da46bb1273cd38d364a191780a0002766dac3
-size 1421507716
+oid sha256:07c77c16142c9a5d9f9d31bf9881ca0cddbb711a43cf87e0fbf75272500b1434
+size 2239630972
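model.safetensors is tracked with Git LFS, so the diff above covers only the pointer file; the weights grow from roughly 1.4 GB to 2.2 GB, largely because the embedding matrix expands to the 250,002-entry XLM-R vocabulary. A small sketch for checking a locally downloaded model.safetensors against the new pointer (local path assumed):

import hashlib
import os

path = "model.safetensors"  # assumed local path to the downloaded weights
expected_oid = "07c77c16142c9a5d9f9d31bf9881ca0cddbb711a43cf87e0fbf75272500b1434"
expected_size = 2239630972

# Hash the file in 1 MiB chunks to avoid loading 2+ GB into memory at once.
sha256 = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert sha256.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("model.safetensors matches the LFS pointer")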