eevvgg committed on
Commit
11a8cac
1 Parent(s): 34b5713

update RoBERTa model

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "j-hartmann/sentiment-roberta-large-english-3-classes",
3
  "architectures": [
4
  "RobertaForSequenceClassification"
5
  ],
@@ -7,27 +7,26 @@
7
  "bos_token_id": 0,
8
  "classifier_dropout": null,
9
  "eos_token_id": 2,
10
- "gradient_checkpointing": false,
11
  "hidden_act": "gelu",
12
  "hidden_dropout_prob": 0.1,
13
- "hidden_size": 1024,
14
  "id2label": {
15
- "0": "neutral",
16
- "1": "positive",
17
- "2": "negative"
18
  },
19
  "initializer_range": 0.02,
20
- "intermediate_size": 4096,
21
  "label2id": {
22
- "negative": 2,
23
- "neutral": 0,
24
- "positive": 1
25
  },
26
  "layer_norm_eps": 1e-05,
27
  "max_position_embeddings": 514,
28
  "model_type": "roberta",
29
- "num_attention_heads": 16,
30
- "num_hidden_layers": 24,
31
  "pad_token_id": 1,
32
  "position_embedding_type": "absolute",
33
  "problem_type": "single_label_classification",
 
1
  {
2
+ "_name_or_path": "roberta-base",
3
  "architectures": [
4
  "RobertaForSequenceClassification"
5
  ],
 
7
  "bos_token_id": 0,
8
  "classifier_dropout": null,
9
  "eos_token_id": 2,
 
10
  "hidden_act": "gelu",
11
  "hidden_dropout_prob": 0.1,
12
+ "hidden_size": 768,
13
  "id2label": {
14
+ "0": "LABEL_0",
15
+ "1": "LABEL_1",
16
+ "2": "LABEL_2"
17
  },
18
  "initializer_range": 0.02,
19
+ "intermediate_size": 3072,
20
  "label2id": {
21
+ "LABEL_0": 0,
22
+ "LABEL_1": 1,
23
+ "LABEL_2": 2
24
  },
25
  "layer_norm_eps": 1e-05,
26
  "max_position_embeddings": 514,
27
  "model_type": "roberta",
28
+ "num_attention_heads": 12,
29
+ "num_hidden_layers": 12,
30
  "pad_token_id": 1,
31
  "position_embedding_type": "absolute",
32
  "problem_type": "single_label_classification",
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:12a0d1b8926589c7a560bbce5a9c29309c31eda1f948107ac340fab11514c2f0
3
- size 1421591285
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ce3b7a2ccac1c9b0211223a068211830cbdbfde0cceb24752d1984c45f7093a1
3
+ size 498665141
special_tokens_map.json CHANGED
@@ -1,51 +1,15 @@
1
  {
2
- "bos_token": {
3
- "content": "<s>",
4
- "lstrip": false,
5
- "normalized": true,
6
- "rstrip": false,
7
- "single_word": false
8
- },
9
- "cls_token": {
10
- "content": "<s>",
11
- "lstrip": false,
12
- "normalized": true,
13
- "rstrip": false,
14
- "single_word": false
15
- },
16
- "eos_token": {
17
- "content": "</s>",
18
- "lstrip": false,
19
- "normalized": true,
20
- "rstrip": false,
21
- "single_word": false
22
- },
23
  "mask_token": {
24
  "content": "<mask>",
25
  "lstrip": true,
26
- "normalized": true,
27
  "rstrip": false,
28
  "single_word": false
29
  },
30
- "pad_token": {
31
- "content": "<pad>",
32
- "lstrip": false,
33
- "normalized": true,
34
- "rstrip": false,
35
- "single_word": false
36
- },
37
- "sep_token": {
38
- "content": "</s>",
39
- "lstrip": false,
40
- "normalized": true,
41
- "rstrip": false,
42
- "single_word": false
43
- },
44
- "unk_token": {
45
- "content": "<unk>",
46
- "lstrip": false,
47
- "normalized": true,
48
- "rstrip": false,
49
- "single_word": false
50
- }
51
  }
 
1
  {
2
+ "bos_token": "<s>",
3
+ "cls_token": "<s>",
4
+ "eos_token": "</s>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  "mask_token": {
6
  "content": "<mask>",
7
  "lstrip": true,
8
+ "normalized": false,
9
  "rstrip": false,
10
  "single_word": false
11
  },
12
+ "pad_token": "<pad>",
13
+ "sep_token": "</s>",
14
+ "unk_token": "<unk>"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
  }
tokenizer.json CHANGED
@@ -1,19 +1,7 @@
1
  {
2
  "version": "1.0",
3
- "truncation": {
4
- "direction": "Right",
5
- "max_length": 200,
6
- "strategy": "LongestFirst",
7
- "stride": 0
8
- },
9
- "padding": {
10
- "strategy": "BatchLongest",
11
- "direction": "Right",
12
- "pad_to_multiple_of": null,
13
- "pad_id": 1,
14
- "pad_type_id": 0,
15
- "pad_token": "<pad>"
16
- },
17
  "added_tokens": [
18
  {
19
  "id": 0,
@@ -21,7 +9,7 @@
21
  "single_word": false,
22
  "lstrip": false,
23
  "rstrip": false,
24
- "normalized": true,
25
  "special": true
26
  },
27
  {
@@ -30,7 +18,7 @@
30
  "single_word": false,
31
  "lstrip": false,
32
  "rstrip": false,
33
- "normalized": true,
34
  "special": true
35
  },
36
  {
@@ -39,7 +27,7 @@
39
  "single_word": false,
40
  "lstrip": false,
41
  "rstrip": false,
42
- "normalized": true,
43
  "special": true
44
  },
45
  {
@@ -48,7 +36,7 @@
48
  "single_word": false,
49
  "lstrip": false,
50
  "rstrip": false,
51
- "normalized": true,
52
  "special": true
53
  },
54
  {
@@ -57,7 +45,7 @@
57
  "single_word": false,
58
  "lstrip": true,
59
  "rstrip": false,
60
- "normalized": true,
61
  "special": true
62
  }
63
  ],
 
1
  {
2
  "version": "1.0",
3
+ "truncation": null,
4
+ "padding": null,
 
 
 
 
 
 
 
 
 
 
 
 
5
  "added_tokens": [
6
  {
7
  "id": 0,
 
9
  "single_word": false,
10
  "lstrip": false,
11
  "rstrip": false,
12
+ "normalized": false,
13
  "special": true
14
  },
15
  {
 
18
  "single_word": false,
19
  "lstrip": false,
20
  "rstrip": false,
21
+ "normalized": false,
22
  "special": true
23
  },
24
  {
 
27
  "single_word": false,
28
  "lstrip": false,
29
  "rstrip": false,
30
+ "normalized": false,
31
  "special": true
32
  },
33
  {
 
36
  "single_word": false,
37
  "lstrip": false,
38
  "rstrip": false,
39
+ "normalized": false,
40
  "special": true
41
  },
42
  {
 
45
  "single_word": false,
46
  "lstrip": true,
47
  "rstrip": false,
48
+ "normalized": false,
49
  "special": true
50
  }
51
  ],
tokenizer_config.json CHANGED
@@ -1,66 +1,16 @@
1
  {
2
  "add_prefix_space": false,
3
- "bos_token": {
4
- "__type": "AddedToken",
5
- "content": "<s>",
6
- "lstrip": false,
7
- "normalized": true,
8
- "rstrip": false,
9
- "single_word": false
10
- },
11
- "cls_token": {
12
- "__type": "AddedToken",
13
- "content": "<s>",
14
- "lstrip": false,
15
- "normalized": true,
16
- "rstrip": false,
17
- "single_word": false
18
- },
19
- "do_lower_case": false,
20
- "eos_token": {
21
- "__type": "AddedToken",
22
- "content": "</s>",
23
- "lstrip": false,
24
- "normalized": true,
25
- "rstrip": false,
26
- "single_word": false
27
- },
28
  "errors": "replace",
29
- "mask_token": {
30
- "__type": "AddedToken",
31
- "content": "<mask>",
32
- "lstrip": true,
33
- "normalized": true,
34
- "rstrip": false,
35
- "single_word": false
36
- },
37
  "model_max_length": 512,
38
- "name_or_path": "j-hartmann/sentiment-roberta-large-english-3-classes",
39
- "pad_token": {
40
- "__type": "AddedToken",
41
- "content": "<pad>",
42
- "lstrip": false,
43
- "normalized": true,
44
- "rstrip": false,
45
- "single_word": false
46
- },
47
- "sep_token": {
48
- "__type": "AddedToken",
49
- "content": "</s>",
50
- "lstrip": false,
51
- "normalized": true,
52
- "rstrip": false,
53
- "single_word": false
54
- },
55
- "special_tokens_map_file": "/root/.cache/huggingface/hub/models--j-hartmann--sentiment-roberta-large-english-3-classes/snapshots/81cdc0fe3eee1bc18d95ffdfb56b2151a39c9007/special_tokens_map.json",
56
  "tokenizer_class": "RobertaTokenizer",
57
  "trim_offsets": true,
58
- "unk_token": {
59
- "__type": "AddedToken",
60
- "content": "<unk>",
61
- "lstrip": false,
62
- "normalized": true,
63
- "rstrip": false,
64
- "single_word": false
65
- }
66
  }
 
1
  {
2
  "add_prefix_space": false,
3
+ "bos_token": "<s>",
4
+ "cls_token": "<s>",
5
+ "eos_token": "</s>",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
  "errors": "replace",
7
+ "mask_token": "<mask>",
 
 
 
 
 
 
 
8
  "model_max_length": 512,
9
+ "name_or_path": "roberta-base",
10
+ "pad_token": "<pad>",
11
+ "sep_token": "</s>",
12
+ "special_tokens_map_file": null,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  "tokenizer_class": "RobertaTokenizer",
14
  "trim_offsets": true,
15
+ "unk_token": "<unk>"
 
 
 
 
 
 
 
16
  }
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:920e33f64cd150ff24c56ecc4a8f2d91e155fdceb3f2db98b392b1ee463aa4ea
3
- size 3451
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:368222b95b15604157afa1d409f4acb6777f76b06bcc5ebc28f6ddc38c76d764
3
+ size 3515