Progz committed on
Commit a93881c
1 Parent(s): fd25a3c

MIDICausalFinetuning2

Files changed (3)
  1. README.md +101 -101
  2. generation_config.json +2 -2
  3. model.safetensors +1 -1
README.md CHANGED
@@ -13,7 +13,7 @@ should probably proofread and complete it, then remove this comment. -->
 
  This model is a fine-tuned version of [](https://huggingface.co/) on the None dataset.
  It achieves the following results on the evaluation set:
- - Loss: 1.2812
+ - Loss: 0.6756
 
  ## Model description
 
@@ -44,106 +44,106 @@ The following hyperparameters were used during training:
 
  | Training Loss | Epoch | Step | Validation Loss |
  |:-------------:|:-----:|:----:|:---------------:|
- | No log | 1.0 | 1 | 5.5830 |
- | No log | 2.0 | 2 | 5.1474 |
- | No log | 3.0 | 3 | 4.8549 |
- | No log | 4.0 | 4 | 4.6409 |
- | No log | 5.0 | 5 | 4.4759 |
- | No log | 6.0 | 6 | 4.3435 |
- | No log | 7.0 | 7 | 4.2331 |
- | No log | 8.0 | 8 | 4.1381 |
- | No log | 9.0 | 9 | 4.0533 |
- | No log | 10.0 | 10 | 3.9750 |
- | No log | 11.0 | 11 | 3.9001 |
- | No log | 12.0 | 12 | 3.8263 |
- | No log | 13.0 | 13 | 3.7525 |
- | No log | 14.0 | 14 | 3.6785 |
- | No log | 15.0 | 15 | 3.6046 |
- | No log | 16.0 | 16 | 3.5311 |
- | No log | 17.0 | 17 | 3.4599 |
- | No log | 18.0 | 18 | 3.3917 |
- | No log | 19.0 | 19 | 3.3268 |
- | No log | 20.0 | 20 | 3.2640 |
- | No log | 21.0 | 21 | 3.2019 |
- | No log | 22.0 | 22 | 3.1411 |
- | No log | 23.0 | 23 | 3.0822 |
- | No log | 24.0 | 24 | 3.0253 |
- | No log | 25.0 | 25 | 2.9707 |
- | No log | 26.0 | 26 | 2.9169 |
- | No log | 27.0 | 27 | 2.8643 |
- | No log | 28.0 | 28 | 2.8123 |
- | No log | 29.0 | 29 | 2.7615 |
- | No log | 30.0 | 30 | 2.7124 |
- | No log | 31.0 | 31 | 2.6651 |
- | No log | 32.0 | 32 | 2.6186 |
- | No log | 33.0 | 33 | 2.5727 |
- | No log | 34.0 | 34 | 2.5279 |
- | No log | 35.0 | 35 | 2.4831 |
- | No log | 36.0 | 36 | 2.4392 |
- | No log | 37.0 | 37 | 2.3967 |
- | No log | 38.0 | 38 | 2.3553 |
- | No log | 39.0 | 39 | 2.3148 |
- | No log | 40.0 | 40 | 2.2754 |
- | No log | 41.0 | 41 | 2.2373 |
- | No log | 42.0 | 42 | 2.1993 |
- | No log | 43.0 | 43 | 2.1612 |
- | No log | 44.0 | 44 | 2.1243 |
- | No log | 45.0 | 45 | 2.0881 |
- | No log | 46.0 | 46 | 2.0538 |
- | No log | 47.0 | 47 | 2.0205 |
- | No log | 48.0 | 48 | 1.9878 |
- | No log | 49.0 | 49 | 1.9546 |
- | No log | 50.0 | 50 | 1.9213 |
- | No log | 51.0 | 51 | 1.8904 |
- | No log | 52.0 | 52 | 1.8605 |
- | No log | 53.0 | 53 | 1.8316 |
- | No log | 54.0 | 54 | 1.8049 |
- | No log | 55.0 | 55 | 1.7773 |
- | No log | 56.0 | 56 | 1.7481 |
- | No log | 57.0 | 57 | 1.7197 |
- | No log | 58.0 | 58 | 1.6959 |
- | No log | 59.0 | 59 | 1.6756 |
- | No log | 60.0 | 60 | 1.6548 |
- | No log | 61.0 | 61 | 1.6319 |
- | No log | 62.0 | 62 | 1.6094 |
- | No log | 63.0 | 63 | 1.5899 |
- | No log | 64.0 | 64 | 1.5738 |
- | No log | 65.0 | 65 | 1.5581 |
- | No log | 66.0 | 66 | 1.5432 |
- | No log | 67.0 | 67 | 1.5252 |
- | No log | 68.0 | 68 | 1.5060 |
- | No log | 69.0 | 69 | 1.4876 |
- | No log | 70.0 | 70 | 1.4722 |
- | No log | 71.0 | 71 | 1.4602 |
- | No log | 72.0 | 72 | 1.4489 |
- | No log | 73.0 | 73 | 1.4370 |
- | No log | 74.0 | 74 | 1.4242 |
- | No log | 75.0 | 75 | 1.4122 |
- | No log | 76.0 | 76 | 1.4029 |
- | No log | 77.0 | 77 | 1.3952 |
- | No log | 78.0 | 78 | 1.3888 |
- | No log | 79.0 | 79 | 1.3820 |
- | No log | 80.0 | 80 | 1.3746 |
- | No log | 81.0 | 81 | 1.3658 |
- | No log | 82.0 | 82 | 1.3564 |
- | No log | 83.0 | 83 | 1.3471 |
- | No log | 84.0 | 84 | 1.3390 |
- | No log | 85.0 | 85 | 1.3316 |
- | No log | 86.0 | 86 | 1.3248 |
- | No log | 87.0 | 87 | 1.3184 |
- | No log | 88.0 | 88 | 1.3126 |
- | No log | 89.0 | 89 | 1.3072 |
- | No log | 90.0 | 90 | 1.3025 |
- | No log | 91.0 | 91 | 1.2984 |
- | No log | 92.0 | 92 | 1.2949 |
- | No log | 93.0 | 93 | 1.2921 |
- | No log | 94.0 | 94 | 1.2897 |
- | No log | 95.0 | 95 | 1.2875 |
- | No log | 96.0 | 96 | 1.2856 |
- | No log | 97.0 | 97 | 1.2840 |
- | No log | 98.0 | 98 | 1.2827 |
- | No log | 99.0 | 99 | 1.2817 |
- | No log | 100.0 | 100 | 1.2812 |
+ | No log | 1.0 | 9 | 7.7655 |
+ | No log | 2.0 | 18 | 6.4257 |
+ | No log | 3.0 | 27 | 5.4697 |
+ | No log | 4.0 | 36 | 4.9705 |
+ | No log | 5.0 | 45 | 4.7258 |
+ | No log | 6.0 | 54 | 4.5740 |
+ | No log | 7.0 | 63 | 4.4554 |
+ | No log | 8.0 | 72 | 4.3483 |
+ | No log | 9.0 | 81 | 4.2406 |
+ | No log | 10.0 | 90 | 4.1217 |
+ | No log | 11.0 | 99 | 3.9690 |
+ | No log | 12.0 | 108 | 3.7765 |
+ | No log | 13.0 | 117 | 3.6364 |
+ | No log | 14.0 | 126 | 3.5090 |
+ | No log | 15.0 | 135 | 3.4009 |
+ | No log | 16.0 | 144 | 3.2948 |
+ | No log | 17.0 | 153 | 3.1934 |
+ | No log | 18.0 | 162 | 3.1031 |
+ | No log | 19.0 | 171 | 3.0232 |
+ | No log | 20.0 | 180 | 2.9464 |
+ | No log | 21.0 | 189 | 2.8734 |
+ | No log | 22.0 | 198 | 2.8016 |
+ | No log | 23.0 | 207 | 2.7296 |
+ | No log | 24.0 | 216 | 2.6571 |
+ | No log | 25.0 | 225 | 2.5846 |
+ | No log | 26.0 | 234 | 2.5193 |
+ | No log | 27.0 | 243 | 2.4498 |
+ | No log | 28.0 | 252 | 2.3844 |
+ | No log | 29.0 | 261 | 2.3150 |
+ | No log | 30.0 | 270 | 2.2558 |
+ | No log | 31.0 | 279 | 2.1873 |
+ | No log | 32.0 | 288 | 2.1213 |
+ | No log | 33.0 | 297 | 2.0649 |
+ | No log | 34.0 | 306 | 1.9997 |
+ | No log | 35.0 | 315 | 1.9421 |
+ | No log | 36.0 | 324 | 1.8803 |
+ | No log | 37.0 | 333 | 1.8131 |
+ | No log | 38.0 | 342 | 1.7380 |
+ | No log | 39.0 | 351 | 1.6847 |
+ | No log | 40.0 | 360 | 1.5993 |
+ | No log | 41.0 | 369 | 1.5855 |
+ | No log | 42.0 | 378 | 1.5034 |
+ | No log | 43.0 | 387 | 1.4867 |
+ | No log | 44.0 | 396 | 1.4380 |
+ | No log | 45.0 | 405 | 1.4309 |
+ | No log | 46.0 | 414 | 1.3585 |
+ | No log | 47.0 | 423 | 1.3231 |
+ | No log | 48.0 | 432 | 1.3071 |
+ | No log | 49.0 | 441 | 1.2690 |
+ | No log | 50.0 | 450 | 1.2417 |
+ | No log | 51.0 | 459 | 1.2078 |
+ | No log | 52.0 | 468 | 1.1709 |
+ | No log | 53.0 | 477 | 1.1457 |
+ | No log | 54.0 | 486 | 1.1317 |
+ | No log | 55.0 | 495 | 1.1155 |
+ | 2.8999 | 56.0 | 504 | 1.0914 |
+ | 2.8999 | 57.0 | 513 | 1.0625 |
+ | 2.8999 | 58.0 | 522 | 1.0380 |
+ | 2.8999 | 59.0 | 531 | 1.0190 |
+ | 2.8999 | 60.0 | 540 | 0.9976 |
+ | 2.8999 | 61.0 | 549 | 0.9716 |
+ | 2.8999 | 62.0 | 558 | 0.9544 |
+ | 2.8999 | 63.0 | 567 | 0.9289 |
+ | 2.8999 | 64.0 | 576 | 0.9157 |
+ | 2.8999 | 65.0 | 585 | 0.8983 |
+ | 2.8999 | 66.0 | 594 | 0.8923 |
+ | 2.8999 | 67.0 | 603 | 0.8751 |
+ | 2.8999 | 68.0 | 612 | 0.8684 |
+ | 2.8999 | 69.0 | 621 | 0.8485 |
+ | 2.8999 | 70.0 | 630 | 0.8349 |
+ | 2.8999 | 71.0 | 639 | 0.8261 |
+ | 2.8999 | 72.0 | 648 | 0.8072 |
+ | 2.8999 | 73.0 | 657 | 0.8034 |
+ | 2.8999 | 74.0 | 666 | 0.7947 |
+ | 2.8999 | 75.0 | 675 | 0.7787 |
+ | 2.8999 | 76.0 | 684 | 0.7700 |
+ | 2.8999 | 77.0 | 693 | 0.7581 |
+ | 2.8999 | 78.0 | 702 | 0.7577 |
+ | 2.8999 | 79.0 | 711 | 0.7472 |
+ | 2.8999 | 80.0 | 720 | 0.7514 |
+ | 2.8999 | 81.0 | 729 | 0.7317 |
+ | 2.8999 | 82.0 | 738 | 0.7334 |
+ | 2.8999 | 83.0 | 747 | 0.7233 |
+ | 2.8999 | 84.0 | 756 | 0.7148 |
+ | 2.8999 | 85.0 | 765 | 0.7139 |
+ | 2.8999 | 86.0 | 774 | 0.7048 |
+ | 2.8999 | 87.0 | 783 | 0.7033 |
+ | 2.8999 | 88.0 | 792 | 0.6972 |
+ | 2.8999 | 89.0 | 801 | 0.6946 |
+ | 2.8999 | 90.0 | 810 | 0.6899 |
+ | 2.8999 | 91.0 | 819 | 0.6867 |
+ | 2.8999 | 92.0 | 828 | 0.6852 |
+ | 2.8999 | 93.0 | 837 | 0.6855 |
+ | 2.8999 | 94.0 | 846 | 0.6815 |
+ | 2.8999 | 95.0 | 855 | 0.6793 |
+ | 2.8999 | 96.0 | 864 | 0.6782 |
+ | 2.8999 | 97.0 | 873 | 0.6754 |
+ | 2.8999 | 98.0 | 882 | 0.6763 |
+ | 2.8999 | 99.0 | 891 | 0.6758 |
+ | 2.8999 | 100.0 | 900 | 0.6756 |
 
 
  ### Framework versions
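Reading the table: the validation loss reported by this kind of auto-generated model card is typically the mean token-level cross-entropy, so the improvement from 1.2812 to 0.6756 can also be read as a drop in perplexity. The jump from 1 to 9 optimizer steps per epoch suggests a larger training set or a smaller effective batch size in this run, and the "No log" entries presumably just mean no training loss had been logged yet at the default logging interval. A minimal sketch of the perplexity conversion, assuming the loss is indeed mean cross-entropy in nats:

```python
import math

# Final validation losses from the two versions of the model card above.
loss_before = 1.2812  # previous checkpoint
loss_after = 0.6756   # this commit

# If the reported loss is mean token-level cross-entropy (in nats),
# perplexity is exp(loss).
print(f"perplexity before: {math.exp(loss_before):.2f}")  # ~3.60
print(f"perplexity after:  {math.exp(loss_after):.2f}")   # ~1.97
```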
generation_config.json CHANGED
@@ -1,6 +1,6 @@
  {
  "_from_model_config": true,
- "bos_token_id": 90,
- "eos_token_id": 90,
+ "bos_token_id": 50256,
+ "eos_token_id": 50256,
  "transformers_version": "4.41.2"
  }
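The new `bos_token_id`/`eos_token_id` of 50256 is GPT-2's `<|endoftext|>` id, which suggests this fine-tune now uses the GPT-2 vocabulary; the previous value of 90 presumably pointed into a smaller custom token vocabulary. A minimal sketch of how `transformers` picks the new ids up at generation time; the repo id `Progz/MIDICausalFinetuning2` is a placeholder guess, not confirmed by this commit:

```python
from transformers import AutoModelForCausalLM, GenerationConfig

# Placeholder repo id for illustration; substitute the actual model repo.
model = AutoModelForCausalLM.from_pretrained("Progz/MIDICausalFinetuning2")

# generation_config.json from this commit is loaded alongside the weights,
# so generate() now stops on token id 50256 instead of 90.
print(model.generation_config.eos_token_id)  # 50256

# The ids can also be overridden per call without editing the file:
gen_config = GenerationConfig(
    bos_token_id=50256,
    eos_token_id=50256,
    max_new_tokens=256,
)
# outputs = model.generate(input_ids, generation_config=gen_config)
```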
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:20a175116bc7f7f927f85a7be6e0614335ae0b308086f6d7b67cbeaae6fad79d
+ oid sha256:62bd112c75278bbe244e689ce5bb0482c5c8a65dd828575c99024ffc56da379e
  size 128265152
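`model.safetensors` is tracked with Git LFS, so the pointer's `oid sha256:` line is the hash of the weight file itself, and the `size` line is its byte count. A minimal sketch for checking a locally downloaded copy against this commit's pointer:

```python
import hashlib
from pathlib import Path

# Values from the LFS pointer in this commit.
EXPECTED_SHA256 = "62bd112c75278bbe244e689ce5bb0482c5c8a65dd828575c99024ffc56da379e"
EXPECTED_SIZE = 128265152  # bytes

path = Path("model.safetensors")  # local copy of the downloaded weights
assert path.stat().st_size == EXPECTED_SIZE, "size mismatch"

digest = hashlib.sha256(path.read_bytes()).hexdigest()
assert digest == EXPECTED_SHA256, "sha256 mismatch"
print("model.safetensors matches the LFS pointer in commit a93881c")
```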