khanhtq2802 committed
Commit: 3dca0f5
Parent: 2792415

Upload RobertaForSequenceClassification

Files changed (1):
  1. config.json +9 -9
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "khanhtq2802/thesis-model",
+  "_name_or_path": "../mlm_backtranslate",
   "architectures": [
     "RobertaForSequenceClassification"
   ],
@@ -12,18 +12,18 @@
   "hidden_dropout_prob": 0.1,
   "hidden_size": 768,
   "id2label": {
-    "0": "neutral",
-    "1": "positive",
-    "2": "negative",
-    "3": "conflict"
+    "0": "LABEL_0",
+    "1": "LABEL_1",
+    "2": "LABEL_2",
+    "3": "LABEL_3"
   },
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "label2id": {
-    "conflict": 3,
-    "negative": 2,
-    "neutral": 0,
-    "positive": 1
+    "LABEL_0": 0,
+    "LABEL_1": 1,
+    "LABEL_2": 2,
+    "LABEL_3": 3
   },
   "layer_norm_eps": 1e-05,
   "max_position_embeddings": 514,