gustavomedeiros committed
Commit: 929f166
Parent: 240a640

End of training
README.md CHANGED
@@ -15,7 +15,7 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [roberta-base](https://huggingface.co/roberta-base) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.3960
+- Loss: 0.4064
 
 ## Model description
 
@@ -47,16 +47,16 @@ The following hyperparameters were used during training:
 
 | Training Loss | Epoch | Step  | Validation Loss |
 |:-------------:|:-----:|:-----:|:---------------:|
-| 0.8047        | 1.0   | 13504 | 0.6456          |
-| 0.5167        | 2.0   | 27008 | 0.5308          |
-| 0.3027        | 3.0   | 40512 | 0.4439          |
-| 0.5447        | 4.0   | 54016 | 0.4235          |
-| 0.3082        | 5.0   | 67520 | 0.3960          |
+| 0.6062        | 1.0   | 13521 | 0.6637          |
+| 0.342         | 2.0   | 27042 | 0.5389          |
+| 0.3967        | 3.0   | 40563 | 0.4142          |
+| 0.3748        | 4.0   | 54084 | 0.4064          |
+| 0.3489        | 5.0   | 67605 | 0.4131          |
 
 
 ### Framework versions
 
-- Transformers 4.34.0
-- Pytorch 2.0.1+cu118
-- Datasets 2.14.5
+- Transformers 4.34.1
+- Pytorch 2.1.0+cu118
+- Datasets 2.14.6
 - Tokenizers 0.14.1
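
The retrained run's best validation loss is 0.4064 at epoch 4, with the loss ticking back up at epoch 5, versus the previous run's best of 0.3960 at epoch 5. As a minimal smoke-test sketch of loading such a checkpoint: the repo id below is a placeholder and the masked-LM head is an assumption, since the card names neither the fine-tuning task nor the published path.

```python
# Sketch only: "gustavomedeiros/roberta-base-finetuned" is a hypothetical
# repo id, and AutoModelForMaskedLM is an assumed head -- the card reports
# only an evaluation loss, not the fine-tuning task.
import torch
from transformers import AutoModelForMaskedLM, AutoTokenizer

repo_id = "gustavomedeiros/roberta-base-finetuned"  # placeholder
tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForMaskedLM.from_pretrained(repo_id)
model.eval()

inputs = tokenizer("The capital of France is <mask>.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Locate the mask position and decode the top prediction.
mask_pos = (inputs["input_ids"] == tokenizer.mask_token_id).nonzero()[0, 1]
print(tokenizer.decode(logits[0, mask_pos].argmax(-1)))
```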
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:209623465470a50e3ba9d5bc0bcb12117b89281cc25327c1932e58330723a4eb
+oid sha256:2d29103a1bbf372bae5140358b4fe676a958daa766cfb132548a56bb46bbea80
 size 498694958
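
Only the Git LFS pointer changes here: for a `sha256` oid, the hash is the digest of the full file contents and `size` is the byte count, so a pulled checkpoint can be verified against the pointer. A small sketch, assuming `pytorch_model.bin` has been downloaded locally:

```python
# Sketch: check a downloaded LFS object against the sha256 oid and size
# recorded in the pointer file.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

path = "pytorch_model.bin"
assert os.path.getsize(path) == 498694958, "size mismatch"
assert sha256_of(path) == (
    "2d29103a1bbf372bae5140358b4fe676a958daa766cfb132548a56bb46bbea80"
), "oid mismatch"
print("pointer and file agree")
```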
special_tokens_map.json CHANGED
@@ -2,7 +2,13 @@
   "bos_token": "<s>",
   "cls_token": "<s>",
   "eos_token": "</s>",
-  "mask_token": "<mask>",
+  "mask_token": {
+    "content": "<mask>",
+    "lstrip": true,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
   "pad_token": "<pad>",
   "sep_token": "</s>",
   "unk_token": "<unk>"
tokenizer.json CHANGED
@@ -21,7 +21,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -30,7 +30,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -39,7 +39,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
@@ -48,7 +48,7 @@
       "single_word": false,
       "lstrip": false,
       "rstrip": false,
-      "normalized": false,
+      "normalized": true,
       "special": true
     },
     {
tokenizer_config.json CHANGED
@@ -4,7 +4,7 @@
     "0": {
      "content": "<s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -12,7 +12,7 @@
     "1": {
       "content": "<pad>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -20,7 +20,7 @@
     "2": {
       "content": "</s>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -28,7 +28,7 @@
     "3": {
       "content": "<unk>",
       "lstrip": false,
-      "normalized": false,
+      "normalized": true,
       "rstrip": false,
       "single_word": false,
       "special": true
@@ -42,7 +42,6 @@
       "special": true
     }
   },
-  "additional_special_tokens": [],
   "bos_token": "<s>",
   "clean_up_tokenization_spaces": true,
   "cls_token": "<s>",