BKforKorea committed on
Commit
d960d5c
1 Parent(s): baf835f

End of training

Browse files
README.md CHANGED
@@ -1,5 +1,5 @@
1
  ---
2
- base_model: jinmang2/kpfbert
3
  tags:
4
  - generated_from_trainer
5
  metrics:
@@ -17,13 +17,13 @@ should probably proofread and complete it, then remove this comment. -->
17
 
18
  # model_y3_research_1
19
 
20
- This model is a fine-tuned version of [jinmang2/kpfbert](https://huggingface.co/jinmang2/kpfbert) on an unknown dataset.
21
  It achieves the following results on the evaluation set:
22
- - Loss: 0.7692
23
- - Accuracy: 0.6082
24
- - F1: 0.5677
25
- - Precision: 0.5728
26
- - Recall: 0.5706
27
 
28
  ## Model description
29
 
@@ -55,16 +55,16 @@ The following hyperparameters were used during training:
55
 
56
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
57
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|:---------:|:------:|
58
- | 0.9518 | 1.0 | 97 | 1.0302 | 0.4167 | 0.2266 | 0.2376 | 0.3280 |
59
- | 0.9191 | 2.0 | 194 | 0.9158 | 0.5833 | 0.5823 | 0.6203 | 0.6297 |
60
- | 0.8298 | 3.0 | 291 | 0.8410 | 0.6042 | 0.5903 | 0.5987 | 0.5848 |
61
- | 0.7093 | 4.0 | 388 | 1.1104 | 0.5208 | 0.5176 | 0.5135 | 0.5312 |
62
- | 0.5994 | 5.0 | 485 | 1.3535 | 0.5521 | 0.5437 | 0.5691 | 0.5555 |
63
- | 0.709 | 6.0 | 582 | 1.8616 | 0.5104 | 0.4892 | 0.4975 | 0.5019 |
64
- | 0.2919 | 7.0 | 679 | 2.5441 | 0.5208 | 0.5174 | 0.5164 | 0.5340 |
65
- | 0.1296 | 8.0 | 776 | 2.9520 | 0.5104 | 0.4975 | 0.5103 | 0.4931 |
66
- | 0.0012 | 9.0 | 873 | 3.1406 | 0.5312 | 0.5182 | 0.5165 | 0.5209 |
67
- | 0.0393 | 10.0 | 970 | 3.1606 | 0.5208 | 0.5109 | 0.5084 | 0.5142 |
68
 
69
 
70
  ### Framework versions
 
1
  ---
2
+ base_model: klue/roberta-large
3
  tags:
4
  - generated_from_trainer
5
  metrics:
 
17
 
18
  # model_y3_research_1
19
 
20
+ This model is a fine-tuned version of [klue/roberta-large](https://huggingface.co/klue/roberta-large) on an unknown dataset.
21
  It achieves the following results on the evaluation set:
22
+ - Loss: 0.9169
23
+ - Accuracy: 0.5979
24
+ - F1: 0.5435
25
+ - Precision: 0.5801
26
+ - Recall: 0.5487
27
 
28
  ## Model description
29
 
 
55
 
56
  | Training Loss | Epoch | Step | Validation Loss | Accuracy | F1 | Precision | Recall |
57
  |:-------------:|:-----:|:----:|:---------------:|:--------:|:------:|:---------:|:------:|
58
+ | 0.9798 | 1.0 | 97 | 0.9334 | 0.5833 | 0.4128 | 0.4359 | 0.4577 |
59
+ | 0.9489 | 2.0 | 194 | 0.9621 | 0.4792 | 0.2160 | 0.1597 | 0.3333 |
60
+ | 0.9564 | 3.0 | 291 | 0.9505 | 0.5104 | 0.3456 | 0.3323 | 0.3764 |
61
+ | 0.8319 | 4.0 | 388 | 0.8693 | 0.6458 | 0.5980 | 0.5970 | 0.6167 |
62
+ | 0.7045 | 5.0 | 485 | 1.1875 | 0.5729 | 0.4888 | 0.5051 | 0.4891 |
63
+ | 0.6337 | 6.0 | 582 | 1.7888 | 0.6042 | 0.4288 | 0.4648 | 0.4752 |
64
+ | 0.3682 | 7.0 | 679 | 2.0383 | 0.5521 | 0.4904 | 0.4889 | 0.4967 |
65
+ | 0.2195 | 8.0 | 776 | 2.3023 | 0.5625 | 0.4993 | 0.4986 | 0.5055 |
66
+ | 0.0244 | 9.0 | 873 | 2.8742 | 0.5417 | 0.4650 | 0.4650 | 0.4674 |
67
+ | 0.1459 | 10.0 | 970 | 2.9738 | 0.5521 | 0.4999 | 0.5001 | 0.5157 |
68
 
69
 
70
  ### Framework versions
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:ef7c7322bcb14ad70d73af483f49af36afd799696e2a3634ddac3f7ba1e3126c
3
  size 1346686020
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f8c1d1a2fdd03192f853b19ff9fcf0c977aa86cc095dcf6ae9f00877a2ad898a
3
  size 1346686020
special_tokens_map.json CHANGED
@@ -1,7 +1,51 @@
1
  {
2
- "cls_token": "[CLS]",
3
- "mask_token": "[MASK]",
4
- "pad_token": "[PAD]",
5
- "sep_token": "[SEP]",
6
- "unk_token": "[UNK]"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
  }
 
1
  {
2
+ "bos_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "cls_token": {
10
+ "content": "[CLS]",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "eos_token": {
17
+ "content": "[SEP]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "mask_token": {
24
+ "content": "[MASK]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "pad_token": {
31
+ "content": "[PAD]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ },
37
+ "sep_token": {
38
+ "content": "[SEP]",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false
43
+ },
44
+ "unk_token": {
45
+ "content": "[UNK]",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false
50
+ }
51
  }
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json CHANGED
@@ -1,7 +1,7 @@
1
  {
2
  "added_tokens_decoder": {
3
  "0": {
4
- "content": "[PAD]",
5
  "lstrip": false,
6
  "normalized": false,
7
  "rstrip": false,
@@ -9,7 +9,7 @@
9
  "special": true
10
  },
11
  "1": {
12
- "content": "[UNK]",
13
  "lstrip": false,
14
  "normalized": false,
15
  "rstrip": false,
@@ -17,7 +17,7 @@
17
  "special": true
18
  },
19
  "2": {
20
- "content": "[CLS]",
21
  "lstrip": false,
22
  "normalized": false,
23
  "rstrip": false,
@@ -25,7 +25,7 @@
25
  "special": true
26
  },
27
  "3": {
28
- "content": "[SEP]",
29
  "lstrip": false,
30
  "normalized": false,
31
  "rstrip": false,
@@ -41,12 +41,14 @@
41
  "special": true
42
  }
43
  },
 
44
  "clean_up_tokenization_spaces": true,
45
  "cls_token": "[CLS]",
46
  "do_basic_tokenize": true,
47
  "do_lower_case": false,
 
48
  "mask_token": "[MASK]",
49
- "model_max_length": 1000000000000000019884624838656,
50
  "never_split": null,
51
  "pad_token": "[PAD]",
52
  "sep_token": "[SEP]",
 
1
  {
2
  "added_tokens_decoder": {
3
  "0": {
4
+ "content": "[CLS]",
5
  "lstrip": false,
6
  "normalized": false,
7
  "rstrip": false,
 
9
  "special": true
10
  },
11
  "1": {
12
+ "content": "[PAD]",
13
  "lstrip": false,
14
  "normalized": false,
15
  "rstrip": false,
 
17
  "special": true
18
  },
19
  "2": {
20
+ "content": "[SEP]",
21
  "lstrip": false,
22
  "normalized": false,
23
  "rstrip": false,
 
25
  "special": true
26
  },
27
  "3": {
28
+ "content": "[UNK]",
29
  "lstrip": false,
30
  "normalized": false,
31
  "rstrip": false,
 
41
  "special": true
42
  }
43
  },
44
+ "bos_token": "[CLS]",
45
  "clean_up_tokenization_spaces": true,
46
  "cls_token": "[CLS]",
47
  "do_basic_tokenize": true,
48
  "do_lower_case": false,
49
+ "eos_token": "[SEP]",
50
  "mask_token": "[MASK]",
51
+ "model_max_length": 512,
52
  "never_split": null,
53
  "pad_token": "[PAD]",
54
  "sep_token": "[SEP]",
vocab.txt CHANGED
The diff for this file is too large to render. See raw diff