khanhtq2802 committed on
Commit
61b621f
1 Parent(s): c00cca1

Upload RobertaForSequenceClassification

Browse files
Files changed (2) hide show
  1. config.json +10 -10
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "../weights",
3
  "architectures": [
4
  "RobertaForSequenceClassification"
5
  ],
@@ -12,18 +12,18 @@
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 768,
14
  "id2label": {
15
- "0": "neutral",
16
- "1": "positive",
17
- "2": "negative",
18
- "3": "conflict"
19
  },
20
  "initializer_range": 0.02,
21
  "intermediate_size": 3072,
22
  "label2id": {
23
- "conflict": 3,
24
- "negative": 2,
25
- "neutral": 0,
26
- "positive": 1
27
  },
28
  "layer_norm_eps": 1e-05,
29
  "max_position_embeddings": 514,
@@ -34,7 +34,7 @@
34
  "position_embedding_type": "absolute",
35
  "problem_type": "single_label_classification",
36
  "torch_dtype": "float32",
37
- "transformers_version": "4.40.2",
38
  "type_vocab_size": 1,
39
  "use_cache": true,
40
  "vocab_size": 50265
 
1
  {
2
+ "_name_or_path": "../best_weights",
3
  "architectures": [
4
  "RobertaForSequenceClassification"
5
  ],
 
12
  "hidden_dropout_prob": 0.1,
13
  "hidden_size": 768,
14
  "id2label": {
15
+ "0": "LABEL_0",
16
+ "1": "LABEL_1",
17
+ "2": "LABEL_2",
18
+ "3": "LABEL_3"
19
  },
20
  "initializer_range": 0.02,
21
  "intermediate_size": 3072,
22
  "label2id": {
23
+ "LABEL_0": 0,
24
+ "LABEL_1": 1,
25
+ "LABEL_2": 2,
26
+ "LABEL_3": 3
27
  },
28
  "layer_norm_eps": 1e-05,
29
  "max_position_embeddings": 514,
 
34
  "position_embedding_type": "absolute",
35
  "problem_type": "single_label_classification",
36
  "torch_dtype": "float32",
37
+ "transformers_version": "4.41.2",
38
  "type_vocab_size": 1,
39
  "use_cache": true,
40
  "vocab_size": 50265
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:cc8da85cdebf4d9de1f2ed9d43ed18ea03e865396633abbccf5ff79d4c430b06
3
  size 498618976
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:05c5b7a9db2fbf5301cf6d40551e70f584f8c37b5fcc924836f4cb1fb065b0ba
3
  size 498618976