Commit 0f01265
Parent(s): 11256a8
SudiptoPramanik/RewardModelSmallerQuestionWithTwoLabelsLengthJustified

Changed files:
- README.md +40 -11
- adapter_config.json +23 -0
- adapter_model.safetensors +3 -0
- runs/Dec04_09-04-32_1f3b4092150b/events.out.tfevents.1701680684.1f3b4092150b.952.0 +3 -0
- runs/Dec04_09-15-29_1f3b4092150b/events.out.tfevents.1701681335.1f3b4092150b.952.1 +3 -0
- runs/Dec04_09-15-29_1f3b4092150b/events.out.tfevents.1701681615.1f3b4092150b.952.2 +3 -0
- runs/Dec04_09-21-43_1f3b4092150b/events.out.tfevents.1701681708.1f3b4092150b.952.3 +3 -0
- runs/Dec04_09-21-43_1f3b4092150b/events.out.tfevents.1701682531.1f3b4092150b.952.4 +3 -0
- runs/Dec04_09-43-11_1f3b4092150b/events.out.tfevents.1701683008.1f3b4092150b.952.5 +3 -0
- runs/Dec04_09-46-36_1f3b4092150b/events.out.tfevents.1701683212.1f3b4092150b.952.6 +3 -0
- runs/Dec04_09-50-59_1f3b4092150b/events.out.tfevents.1701683483.1f3b4092150b.17971.0 +3 -0
- runs/Dec04_09-50-59_1f3b4092150b/events.out.tfevents.1701688189.1f3b4092150b.17971.1 +3 -0
- tokenizer.json +2 -2
- tokenizer_config.json +1 -1
- training_args.bin +1 -1
README.md CHANGED

@@ -1,6 +1,6 @@
 ---
 license: mit
-base_model: roberta-
+base_model: roberta-large
 tags:
 - generated_from_trainer
 metrics:

@@ -16,12 +16,12 @@ should probably proofread and complete it, then remove this comment. -->
 
 # RewardModelSmallerQuestionWithTwoLabelsLengthJustified
 
-This model is a fine-tuned version of [roberta-
+This model is a fine-tuned version of [roberta-large](https://huggingface.co/roberta-large) on the None dataset.
 It achieves the following results on the evaluation set:
-- Loss: 0.
-- F1: 0.
-- Roc Auc: 0.
-- Accuracy: 0.
+- Loss: 0.5248
+- F1: 0.7539
+- Roc Auc: 0.7508
+- Accuracy: 0.7380
 
 ## Model description
 
@@ -40,20 +40,49 @@ More information needed
 ### Training hyperparameters
 
 The following hyperparameters were used during training:
-- learning_rate:
+- learning_rate: 0.0001
 - train_batch_size: 8
 - eval_batch_size: 8
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
-- lr_scheduler_type:
--
+- lr_scheduler_type: constant
+- lr_scheduler_warmup_ratio: 0.1
+- num_epochs: 30
 
 ### Training results
 
 | Training Loss | Epoch | Step | Validation Loss | F1     | Roc Auc | Accuracy |
 |:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|:--------:|
-| 0.
-| 0.
+| 0.7105        | 1.0   | 145  | 0.6814          | 0.5260 | 0.5192  | 0.5048   |
+| 0.6899        | 2.0   | 290  | 0.6530          | 0.6090 | 0.6102  | 0.6038   |
+| 0.6703        | 3.0   | 435  | 0.6318          | 0.6387 | 0.6565  | 0.6070   |
+| 0.6432        | 4.0   | 580  | 0.6098          | 0.6961 | 0.7029  | 0.6805   |
+| 0.6273        | 5.0   | 725  | 0.5909          | 0.7118 | 0.7141  | 0.7061   |
+| 0.64          | 6.0   | 870  | 0.5837          | 0.7038 | 0.7029  | 0.6965   |
+| 0.6178        | 7.0   | 1015 | 0.5829          | 0.7005 | 0.6981  | 0.6869   |
+| 0.6342        | 8.0   | 1160 | 0.5855          | 0.6785 | 0.6805  | 0.6741   |
+| 0.583         | 9.0   | 1305 | 0.5549          | 0.7310 | 0.7284  | 0.7188   |
+| 0.5801        | 10.0  | 1450 | 0.5805          | 0.6710 | 0.6773  | 0.6581   |
+| 0.6279        | 11.0  | 1595 | 0.6581          | 0.6003 | 0.6022  | 0.5974   |
+| 0.6112        | 12.0  | 1740 | 0.5382          | 0.7372 | 0.7380  | 0.7348   |
+| 0.5967        | 13.0  | 1885 | 0.6305          | 0.6443 | 0.6438  | 0.6422   |
+| 0.5927        | 14.0  | 2030 | 0.6144          | 0.6613 | 0.6645  | 0.6550   |
+| 0.5968        | 15.0  | 2175 | 0.5825          | 0.6901 | 0.6901  | 0.6901   |
+| 0.6122        | 16.0  | 2320 | 0.5858          | 0.6815 | 0.6805  | 0.6773   |
+| 0.5941        | 17.0  | 2465 | 0.5719          | 0.6979 | 0.7013  | 0.6901   |
+| 0.5977        | 18.0  | 2610 | 0.6043          | 0.6699 | 0.6709  | 0.6677   |
+| 0.59          | 19.0  | 2755 | 0.5465          | 0.7203 | 0.7220  | 0.7157   |
+| 0.5871        | 20.0  | 2900 | 0.6474          | 0.6262 | 0.6262  | 0.6262   |
+| 0.5932        | 21.0  | 3045 | 0.5701          | 0.6945 | 0.6965  | 0.6901   |
+| 0.5966        | 22.0  | 3190 | 0.5281          | 0.7387 | 0.7412  | 0.7316   |
+| 0.6006        | 23.0  | 3335 | 0.5713          | 0.6945 | 0.6965  | 0.6869   |
+| 0.5696        | 24.0  | 3480 | 0.6498          | 0.6242 | 0.6230  | 0.6198   |
+| 0.5921        | 25.0  | 3625 | 0.6453          | 0.6359 | 0.6342  | 0.6294   |
+| 0.5761        | 26.0  | 3770 | 0.5226          | 0.7528 | 0.7524  | 0.7508   |
+| 0.5504        | 27.0  | 3915 | 0.5793          | 0.6751 | 0.6725  | 0.6645   |
+| 0.5891        | 28.0  | 4060 | 0.5248          | 0.7539 | 0.7508  | 0.7380   |
+| 0.5757        | 29.0  | 4205 | 0.5983          | 0.6699 | 0.6693  | 0.6677   |
+| 0.5631        | 30.0  | 4350 | 0.6187          | 0.6454 | 0.6454  | 0.6454   |
 
 
 ### Framework versions
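The updated card describes a LoRA fine-tune of roberta-large with two output labels (per the repository name), evaluated on loss, F1, ROC AUC and accuracy. Below is a minimal inference sketch, assuming the adapter published in this repository is applied on top of the public roberta-large checkpoint and that the head has two labels; the example sentence is illustrative, not from the card:

```python
# Minimal sketch: load the roberta-large base model and apply the LoRA adapter
# from this repository. num_labels=2 is an assumption based on the repo name.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
from peft import PeftModel

repo_id = "SudiptoPramanik/RewardModelSmallerQuestionWithTwoLabelsLengthJustified"

tokenizer = AutoTokenizer.from_pretrained(repo_id)
base = AutoModelForSequenceClassification.from_pretrained("roberta-large", num_labels=2)
model = PeftModel.from_pretrained(base, repo_id)  # injects the low-rank adapter weights
model.eval()

inputs = tokenizer("Is this answer helpful and well justified?", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 2)
print(logits)
```

Note that modules_to_save is null in the adapter config below, so depending on how the adapter was exported the classification head may need to be restored separately rather than taken from the randomly initialized base head.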
adapter_config.json ADDED

@@ -0,0 +1,23 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": null,
+  "base_model_name_or_path": "roberta-large",
+  "bias": "none",
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.1,
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 2,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "query",
+    "value"
+  ],
+  "task_type": "SEQ_CLS"
+}
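The file above is a PEFT LoRA adapter configuration: rank 2, alpha 16, dropout 0.1, no bias terms, applied to the query and value projections of a sequence-classification model. A sketch of how an equivalent configuration could be built with the peft library follows; the base-model call and num_labels=2 are assumptions, not part of the committed file:

```python
# Sketch: recreate the LoRA configuration recorded in adapter_config.json
# and attach it to a roberta-large sequence classifier.
from transformers import AutoModelForSequenceClassification
from peft import LoraConfig, TaskType, get_peft_model

lora_config = LoraConfig(
    task_type=TaskType.SEQ_CLS,         # "task_type": "SEQ_CLS"
    r=2,                                # "r": 2
    lora_alpha=16,                      # "lora_alpha": 16
    lora_dropout=0.1,                   # "lora_dropout": 0.1
    bias="none",                        # "bias": "none"
    target_modules=["query", "value"],  # attention query/value projections
)

base = AutoModelForSequenceClassification.from_pretrained("roberta-large", num_labels=2)
model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # only the low-rank adapters (and head) are trainable
```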
adapter_model.safetensors ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:138b66bdbcf6f7c9196c3fdb6fc1d4d6c2e1a5c1cd2decfc6f6895976f39fcac
+size 5007376
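The three added lines above are a Git LFS pointer, not the weights themselves: the repository records only the spec version, the sha256 oid, and the byte size (about 5 MB here), and the same pattern applies to the TensorBoard event files that follow. A small sketch of fetching the resolved file through huggingface_hub, with the printout purely illustrative:

```python
# Sketch: resolve a Git LFS pointer by downloading the actual file from the Hub.
from huggingface_hub import hf_hub_download

path = hf_hub_download(
    repo_id="SudiptoPramanik/RewardModelSmallerQuestionWithTwoLabelsLengthJustified",
    filename="adapter_model.safetensors",
)
print(path)  # local cached copy of the ~5 MB adapter weights
```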
runs/Dec04_09-04-32_1f3b4092150b/events.out.tfevents.1701680684.1f3b4092150b.952.0 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71e1ba9594798004d01f6fd1ab30c2f7b368a4d6dfb6efcfccffe22139796b41
+size 9170

runs/Dec04_09-15-29_1f3b4092150b/events.out.tfevents.1701681335.1f3b4092150b.952.1 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:51e069d684e99621f8a79f7059e14e326a22ccf0f1b1e60c356ced48f9898913
+size 8025

runs/Dec04_09-15-29_1f3b4092150b/events.out.tfevents.1701681615.1f3b4092150b.952.2 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:006af2a2ae3fa0d98a19d33423538928e15e6fb19a4f09a5f714d6d5d4b4b31a
+size 508

runs/Dec04_09-21-43_1f3b4092150b/events.out.tfevents.1701681708.1f3b4092150b.952.3 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:084cbf7cd27f88f2083a9cf3857bba69ea1b93aca8a24a6dc9fdc26b7433f1e3
+size 14424

runs/Dec04_09-21-43_1f3b4092150b/events.out.tfevents.1701682531.1f3b4092150b.952.4 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f7cf00578d38e26ff24d107e7221f9ff72b9ab67dba467920bb331fec99f4d1
+size 508

runs/Dec04_09-43-11_1f3b4092150b/events.out.tfevents.1701683008.1f3b4092150b.952.5 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a12b7220cab1b3be39b68b32969166cb755e6a477c3ff360f27b8dbcbe6e134e
+size 4632

runs/Dec04_09-46-36_1f3b4092150b/events.out.tfevents.1701683212.1f3b4092150b.952.6 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:790b8a1f54a9481d92939c9032c5c1f0bb7577d411f52323d0c18afee590d12e
+size 5052

runs/Dec04_09-50-59_1f3b4092150b/events.out.tfevents.1701683483.1f3b4092150b.17971.0 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0727cb90a14885349a784c0cfe4cd68119761506a0890acd1808dd5a0f10c6ae
+size 24180

runs/Dec04_09-50-59_1f3b4092150b/events.out.tfevents.1701688189.1f3b4092150b.17971.1 ADDED

@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6cf9bb25e24cb05951fe00bf678be0b3b680f8b1cad5501eb1a2f734578ab727
+size 508
tokenizer.json CHANGED

@@ -52,7 +52,7 @@
   "normalizer": null,
   "pre_tokenizer": {
     "type": "ByteLevel",
-    "add_prefix_space":
+    "add_prefix_space": true,
     "trim_offsets": true,
     "use_regex": true
   },
@@ -67,7 +67,7 @@
       0
     ],
     "trim_offsets": true,
-    "add_prefix_space":
+    "add_prefix_space": true
   },
   "decoder": {
     "type": "ByteLevel",
tokenizer_config.json CHANGED

@@ -1,5 +1,5 @@
 {
-  "add_prefix_space":
+  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<s>",
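Both tokenizer.json and tokenizer_config.json now set add_prefix_space to true, so the byte-level BPE treats the first word of an input as if it were preceded by a space, matching how mid-sentence words are tokenized. A small sketch of the effect, assuming the tokenizer is loaded directly from this repository:

```python
# Sketch: with "add_prefix_space": true, the first word gets the byte-level
# space marker ('Ġ'), the same encoding used for words after a space.
from transformers import AutoTokenizer

repo_id = "SudiptoPramanik/RewardModelSmallerQuestionWithTwoLabelsLengthJustified"
tok = AutoTokenizer.from_pretrained(repo_id)  # picks up add_prefix_space from the config

print(tok.tokenize("Reward"))   # first token carries the 'Ġ' space marker
print(tok.tokenize(" Reward"))  # tokenizes the same way as the line above
```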
training_args.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d62dbe25d65322a60e61f30a77228e0a9971a13a3d8a0bc906eea153d6b820f9
 size 4664