cuba6112 committed
Commit aeab63c
1 Parent(s): 7bc461b

Fine-tuned GPT-2 on Wikitext-2

README.md CHANGED
@@ -1,402 +1,92 @@
- ---
- license: mit
- base_model: cuba6112/orion
- tags:
- - generated_from_trainer
- model-index:
- - name: orion
-   results: []
- ---
-
- <!-- This model card has been generated automatically according to the information the Trainer had access to. You
- should probably proofread and complete it, then remove this comment. -->
-
- # orion
-
- This model is a fine-tuned version of [cuba6112/orion](https://huggingface.co/cuba6112/orion) on an unknown dataset.
- It achieves the following results on the evaluation set:
- - Loss: 3.3813
-
- ## Model description
-
- More information needed
-
- ## Intended uses & limitations
-
- More information needed
-
- ## Training and evaluation data
-
- More information needed
-
- ## Training procedure
-
- ### Training hyperparameters
-
- The following hyperparameters were used during training:
- - learning_rate: 2e-05
- - train_batch_size: 8
- - eval_batch_size: 8
- - seed: 42
- - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- - lr_scheduler_type: linear
- - lr_scheduler_warmup_steps: 500
- - num_epochs: 30
- - mixed_precision_training: Native AMP
-
- ### Training results
-
- | Training Loss | Epoch | Step | Validation Loss |
- |:-------------:|:-------:|:------:|:---------------:|
- | No log | 0.0871 | 400 | 3.0498 |
- | 2.1987 | 0.1743 | 800 | 3.2218 |
- | 1.682 | 0.2614 | 1200 | 3.2123 |
- | 1.7259 | 0.3486 | 1600 | 3.2036 |
- | 1.996 | 0.4357 | 2000 | 3.0685 |
- | 1.996 | 0.5229 | 2400 | 3.0753 |
- | 2.1139 | 0.6100 | 2800 | 3.0261 |
- | 2.1855 | 0.6972 | 3200 | 2.9876 |
- | 2.3493 | 0.7843 | 3600 | 2.9616 |
- | 2.4411 | 0.8715 | 4000 | 2.9416 |
- | 2.4411 | 0.9586 | 4400 | 2.9191 |
- | 2.5409 | 1.0458 | 4800 | 2.9867 |
- | 2.3445 | 1.1329 | 5200 | 2.9837 |
- | 2.3134 | 1.2200 | 5600 | 2.9847 |
- | 2.312 | 1.3072 | 6000 | 2.9661 |
- | 2.312 | 1.3943 | 6400 | 2.9673 |
- | 2.3304 | 1.4815 | 6800 | 2.9659 |
- | 2.3239 | 1.5686 | 7200 | 2.9541 |
- | 2.3673 | 1.6558 | 7600 | 2.9694 |
- | 2.3503 | 1.7429 | 8000 | 2.9608 |
- | 2.3503 | 1.8301 | 8400 | 2.9436 |
- | 2.411 | 1.9172 | 8800 | 2.9596 |
- | 2.3752 | 2.0044 | 9200 | 2.9666 |
- | 2.3068 | 2.0915 | 9600 | 2.9763 |
- | 2.2758 | 2.1786 | 10000 | 2.9779 |
- | 2.2758 | 2.2658 | 10400 | 2.9780 |
- | 2.3147 | 2.3529 | 10800 | 2.9708 |
- | 2.3042 | 2.4401 | 11200 | 2.9562 |
- | 2.326 | 2.5272 | 11600 | 2.9669 |
- | 2.3543 | 2.6144 | 12000 | 2.9586 |
- | 2.3543 | 2.7015 | 12400 | 2.9503 |
- | 2.3478 | 2.7887 | 12800 | 2.9551 |
- | 2.3671 | 2.8758 | 13200 | 2.9495 |
- | 2.3881 | 2.9630 | 13600 | 2.9487 |
- | 2.3464 | 3.0501 | 14000 | 2.9822 |
- | 2.3464 | 3.1373 | 14400 | 2.9787 |
- | 2.2827 | 3.2244 | 14800 | 2.9811 |
- | 2.3025 | 3.3115 | 15200 | 2.9744 |
- | 2.3032 | 3.3987 | 15600 | 2.9633 |
- | 2.2829 | 3.4858 | 16000 | 2.9682 |
- | 2.2829 | 3.5730 | 16400 | 2.9691 |
- | 2.3227 | 3.6601 | 16800 | 2.9705 |
- | 2.304 | 3.7473 | 17200 | 2.9695 |
- | 2.3249 | 3.8344 | 17600 | 2.9564 |
- | 2.3228 | 3.9216 | 18000 | 2.9707 |
- | 2.3228 | 4.0087 | 18400 | 2.9947 |
- | 2.303 | 4.0959 | 18800 | 2.9898 |
- | 2.2062 | 4.1830 | 19200 | 2.9936 |
- | 2.2194 | 4.2702 | 19600 | 2.9913 |
- | 2.2357 | 4.3573 | 20000 | 3.0012 |
- | 2.2357 | 4.4444 | 20400 | 2.9915 |
- | 2.2628 | 4.5316 | 20800 | 2.9880 |
- | 2.26 | 4.6187 | 21200 | 2.9777 |
- | 2.2893 | 4.7059 | 21600 | 2.9920 |
- | 2.2587 | 4.7930 | 22000 | 2.9791 |
- | 2.2587 | 4.8802 | 22400 | 2.9806 |
- | 2.2752 | 4.9673 | 22800 | 2.9791 |
- | 2.2541 | 5.0545 | 23200 | 3.0167 |
- | 2.1708 | 5.1416 | 23600 | 3.0109 |
- | 2.1866 | 5.2288 | 24000 | 3.0121 |
- | 2.1866 | 5.3159 | 24400 | 3.0081 |
- | 2.1906 | 5.4031 | 24800 | 3.0166 |
- | 2.1882 | 5.4902 | 25200 | 3.0160 |
- | 2.2057 | 5.5773 | 25600 | 3.0014 |
- | 2.2106 | 5.6645 | 26000 | 3.0005 |
- | 2.2106 | 5.7516 | 26400 | 3.0028 |
- | 2.2293 | 5.8388 | 26800 | 3.0156 |
- | 2.1999 | 5.9259 | 27200 | 3.0012 |
- | 2.2221 | 6.0131 | 27600 | 3.0241 |
- | 2.1506 | 6.1002 | 28000 | 3.0365 |
- | 2.1506 | 6.1874 | 28400 | 3.0434 |
- | 2.1511 | 6.2745 | 28800 | 3.0286 |
- | 2.1449 | 6.3617 | 29200 | 3.0477 |
- | 2.1298 | 6.4488 | 29600 | 3.0356 |
- | 2.166 | 6.5359 | 30000 | 3.0365 |
- | 2.166 | 6.6231 | 30400 | 3.0301 |
- | 2.1646 | 6.7102 | 30800 | 3.0291 |
- | 2.14 | 6.7974 | 31200 | 3.0259 |
- | 2.1597 | 6.8845 | 31600 | 3.0171 |
- | 2.1661 | 6.9717 | 32000 | 3.0316 |
- | 2.1661 | 7.0588 | 32400 | 3.0620 |
- | 2.1108 | 7.1460 | 32800 | 3.0650 |
- | 2.1125 | 7.2331 | 33200 | 3.0554 |
- | 2.112 | 7.3203 | 33600 | 3.0693 |
- | 2.0923 | 7.4074 | 34000 | 3.0634 |
- | 2.0923 | 7.4946 | 34400 | 3.0530 |
- | 2.114 | 7.5817 | 34800 | 3.0421 |
- | 2.1223 | 7.6688 | 35200 | 3.0439 |
- | 2.1027 | 7.7560 | 35600 | 3.0478 |
- | 2.1345 | 7.8431 | 36000 | 3.0432 |
- | 2.1345 | 7.9303 | 36400 | 3.0441 |
- | 2.1327 | 8.0174 | 36800 | 3.0732 |
- | 2.0844 | 8.1046 | 37200 | 3.0709 |
- | 2.0634 | 8.1917 | 37600 | 3.0767 |
- | 2.0644 | 8.2789 | 38000 | 3.0821 |
- | 2.0644 | 8.3660 | 38400 | 3.0742 |
- | 2.0724 | 8.4532 | 38800 | 3.0888 |
- | 2.0661 | 8.5403 | 39200 | 3.0760 |
- | 2.0706 | 8.6275 | 39600 | 3.0767 |
- | 2.0673 | 8.7146 | 40000 | 3.0632 |
- | 2.0673 | 8.8017 | 40400 | 3.0682 |
- | 2.0982 | 8.8889 | 40800 | 3.0741 |
- | 2.0998 | 8.9760 | 41200 | 3.0715 |
- | 2.0553 | 9.0632 | 41600 | 3.1081 |
- | 2.0268 | 9.1503 | 42000 | 3.1013 |
- | 2.0268 | 9.2375 | 42400 | 3.1060 |
- | 2.0072 | 9.3246 | 42800 | 3.0983 |
- | 2.0263 | 9.4118 | 43200 | 3.0981 |
- | 2.0114 | 9.4989 | 43600 | 3.0915 |
- | 2.0314 | 9.5861 | 44000 | 3.0973 |
- | 2.0314 | 9.6732 | 44400 | 3.1053 |
- | 2.0413 | 9.7603 | 44800 | 3.0982 |
- | 2.0425 | 9.8475 | 45200 | 3.1079 |
- | 2.0354 | 9.9346 | 45600 | 3.0982 |
- | 2.0485 | 10.0218 | 46000 | 3.1200 |
- | 2.0485 | 10.1089 | 46400 | 3.1409 |
- | 1.9523 | 10.1961 | 46800 | 3.1287 |
- | 1.9911 | 10.2832 | 47200 | 3.1222 |
- | 1.9842 | 10.3704 | 47600 | 3.1362 |
- | 1.9892 | 10.4575 | 48000 | 3.1157 |
- | 1.9892 | 10.5447 | 48400 | 3.1314 |
- | 1.9867 | 10.6318 | 48800 | 3.1089 |
- | 2.0033 | 10.7190 | 49200 | 3.1138 |
- | 2.0057 | 10.8061 | 49600 | 3.1332 |
- | 1.9959 | 10.8932 | 50000 | 3.1280 |
- | 1.9959 | 10.9804 | 50400 | 3.1232 |
- | 1.9956 | 11.0675 | 50800 | 3.1424 |
- | 1.951 | 11.1547 | 51200 | 3.1473 |
- | 1.9491 | 11.2418 | 51600 | 3.1429 |
- | 1.9612 | 11.3290 | 52000 | 3.1451 |
- | 1.9612 | 11.4161 | 52400 | 3.1591 |
- | 1.971 | 11.5033 | 52800 | 3.1558 |
- | 1.9582 | 11.5904 | 53200 | 3.1337 |
- | 1.9675 | 11.6776 | 53600 | 3.1459 |
- | 1.9529 | 11.7647 | 54000 | 3.1416 |
- | 1.9529 | 11.8519 | 54400 | 3.1484 |
- | 1.9572 | 11.9390 | 54800 | 3.1465 |
- | 1.989 | 12.0261 | 55200 | 3.1638 |
- | 1.9232 | 12.1133 | 55600 | 3.1767 |
- | 1.9115 | 12.2004 | 56000 | 3.1617 |
- | 1.9115 | 12.2876 | 56400 | 3.1688 |
- | 1.9082 | 12.3747 | 56800 | 3.1755 |
- | 1.9193 | 12.4619 | 57200 | 3.1826 |
- | 1.9188 | 12.5490 | 57600 | 3.1732 |
- | 1.9367 | 12.6362 | 58000 | 3.1784 |
- | 1.9367 | 12.7233 | 58400 | 3.1680 |
- | 1.9442 | 12.8105 | 58800 | 3.1649 |
- | 1.9402 | 12.8976 | 59200 | 3.1517 |
- | 1.9426 | 12.9847 | 59600 | 3.1584 |
- | 1.9087 | 13.0719 | 60000 | 3.1833 |
- | 1.9087 | 13.1590 | 60400 | 3.1808 |
- | 1.888 | 13.2462 | 60800 | 3.1818 |
- | 1.8757 | 13.3333 | 61200 | 3.2008 |
- | 1.8792 | 13.4205 | 61600 | 3.2086 |
- | 1.8875 | 13.5076 | 62000 | 3.1938 |
- | 1.8875 | 13.5948 | 62400 | 3.1910 |
- | 1.8815 | 13.6819 | 62800 | 3.1948 |
- | 1.9077 | 13.7691 | 63200 | 3.1791 |
- | 1.9038 | 13.8562 | 63600 | 3.1817 |
- | 1.9153 | 13.9434 | 64000 | 3.1879 |
- | 1.9153 | 14.0305 | 64400 | 3.2086 |
- | 1.8823 | 14.1176 | 64800 | 3.2081 |
- | 1.8672 | 14.2048 | 65200 | 3.2079 |
- | 1.8373 | 14.2919 | 65600 | 3.1995 |
- | 1.8498 | 14.3791 | 66000 | 3.2020 |
- | 1.8498 | 14.4662 | 66400 | 3.1984 |
- | 1.8694 | 14.5534 | 66800 | 3.2120 |
- | 1.8736 | 14.6405 | 67200 | 3.2019 |
- | 1.8702 | 14.7277 | 67600 | 3.2145 |
- | 1.8839 | 14.8148 | 68000 | 3.2128 |
- | 1.8839 | 14.9020 | 68400 | 3.2141 |
- | 1.8872 | 14.9891 | 68800 | 3.2037 |
- | 1.8689 | 15.0763 | 69200 | 3.2270 |
- | 1.8321 | 15.1634 | 69600 | 3.2363 |
- | 1.8226 | 15.2505 | 70000 | 3.2272 |
- | 1.8226 | 15.3377 | 70400 | 3.2378 |
- | 1.8367 | 15.4248 | 70800 | 3.2220 |
- | 1.8403 | 15.5120 | 71200 | 3.2190 |
- | 1.8473 | 15.5991 | 71600 | 3.2261 |
- | 1.83 | 15.6863 | 72000 | 3.2246 |
- | 1.83 | 15.7734 | 72400 | 3.2292 |
- | 1.8554 | 15.8606 | 72800 | 3.2194 |
- | 1.8569 | 15.9477 | 73200 | 3.2239 |
- | 1.8493 | 16.0349 | 73600 | 3.2449 |
- | 1.8066 | 16.1220 | 74000 | 3.2532 |
- | 1.8066 | 16.2092 | 74400 | 3.2525 |
- | 1.7883 | 16.2963 | 74800 | 3.2499 |
- | 1.8053 | 16.3834 | 75200 | 3.2533 |
- | 1.8036 | 16.4706 | 75600 | 3.2437 |
- | 1.8197 | 16.5577 | 76000 | 3.2443 |
- | 1.8197 | 16.6449 | 76400 | 3.2466 |
- | 1.8069 | 16.7320 | 76800 | 3.2301 |
- | 1.8108 | 16.8192 | 77200 | 3.2422 |
- | 1.83 | 16.9063 | 77600 | 3.2477 |
- | 1.8424 | 16.9935 | 78000 | 3.2489 |
- | 1.8424 | 17.0806 | 78400 | 3.2720 |
- | 1.7802 | 17.1678 | 78800 | 3.2764 |
- | 1.7698 | 17.2549 | 79200 | 3.2724 |
- | 1.7883 | 17.3420 | 79600 | 3.2668 |
- | 1.8066 | 17.4292 | 80000 | 3.2590 |
- | 1.8066 | 17.5163 | 80400 | 3.2698 |
- | 1.7783 | 17.6035 | 80800 | 3.2650 |
- | 1.7753 | 17.6906 | 81200 | 3.2656 |
- | 1.8071 | 17.7778 | 81600 | 3.2589 |
- | 1.8148 | 17.8649 | 82000 | 3.2583 |
- | 1.8148 | 17.9521 | 82400 | 3.2603 |
- | 1.8088 | 18.0392 | 82800 | 3.2774 |
- | 1.7732 | 18.1264 | 83200 | 3.2715 |
- | 1.7401 | 18.2135 | 83600 | 3.2806 |
- | 1.7522 | 18.3007 | 84000 | 3.2847 |
- | 1.7522 | 18.3878 | 84400 | 3.2842 |
- | 1.7704 | 18.4749 | 84800 | 3.2705 |
- | 1.7778 | 18.5621 | 85200 | 3.2813 |
- | 1.7811 | 18.6492 | 85600 | 3.2771 |
- | 1.7706 | 18.7364 | 86000 | 3.2808 |
- | 1.7706 | 18.8235 | 86400 | 3.2833 |
- | 1.7669 | 18.9107 | 86800 | 3.2800 |
- | 1.7831 | 18.9978 | 87200 | 3.2816 |
- | 1.7559 | 19.0850 | 87600 | 3.3049 |
- | 1.7288 | 19.1721 | 88000 | 3.2914 |
- | 1.7288 | 19.2593 | 88400 | 3.2865 |
- | 1.7625 | 19.3464 | 88800 | 3.3103 |
- | 1.7507 | 19.4336 | 89200 | 3.2994 |
- | 1.7422 | 19.5207 | 89600 | 3.3055 |
- | 1.745 | 19.6078 | 90000 | 3.2948 |
- | 1.745 | 19.6950 | 90400 | 3.2899 |
- | 1.7514 | 19.7821 | 90800 | 3.3007 |
- | 1.7674 | 19.8693 | 91200 | 3.2860 |
- | 1.7584 | 19.9564 | 91600 | 3.2932 |
- | 1.7563 | 20.0436 | 92000 | 3.2996 |
- | 1.7563 | 20.1307 | 92400 | 3.3120 |
- | 1.7272 | 20.2179 | 92800 | 3.3208 |
- | 1.7268 | 20.3050 | 93200 | 3.3080 |
- | 1.7156 | 20.3922 | 93600 | 3.3193 |
- | 1.7437 | 20.4793 | 94000 | 3.3151 |
- | 1.7437 | 20.5664 | 94400 | 3.2991 |
- | 1.7282 | 20.6536 | 94800 | 3.3088 |
- | 1.7532 | 20.7407 | 95200 | 3.3054 |
- | 1.7394 | 20.8279 | 95600 | 3.3044 |
- | 1.7238 | 20.9150 | 96000 | 3.3098 |
- | 1.7238 | 21.0022 | 96400 | 3.3157 |
- | 1.7179 | 21.0893 | 96800 | 3.3328 |
- | 1.6887 | 21.1765 | 97200 | 3.3292 |
- | 1.7301 | 21.2636 | 97600 | 3.3223 |
- | 1.7126 | 21.3508 | 98000 | 3.3146 |
- | 1.7126 | 21.4379 | 98400 | 3.3177 |
- | 1.7163 | 21.5251 | 98800 | 3.3153 |
- | 1.7132 | 21.6122 | 99200 | 3.3180 |
- | 1.7128 | 21.6993 | 99600 | 3.3167 |
- | 1.7088 | 21.7865 | 100000 | 3.3214 |
- | 1.7088 | 21.8736 | 100400 | 3.3187 |
- | 1.7205 | 21.9608 | 100800 | 3.3214 |
- | 1.7121 | 22.0479 | 101200 | 3.3374 |
- | 1.6778 | 22.1351 | 101600 | 3.3358 |
- | 1.7013 | 22.2222 | 102000 | 3.3389 |
- | 1.7013 | 22.3094 | 102400 | 3.3322 |
- | 1.6742 | 22.3965 | 102800 | 3.3478 |
- | 1.6972 | 22.4837 | 103200 | 3.3255 |
- | 1.6951 | 22.5708 | 103600 | 3.3325 |
- | 1.7017 | 22.6580 | 104000 | 3.3424 |
- | 1.7017 | 22.7451 | 104400 | 3.3256 |
- | 1.7041 | 22.8322 | 104800 | 3.3353 |
- | 1.7165 | 22.9194 | 105200 | 3.3224 |
- | 1.724 | 23.0065 | 105600 | 3.3278 |
- | 1.6814 | 23.0937 | 106000 | 3.3438 |
- | 1.6814 | 23.1808 | 106400 | 3.3510 |
- | 1.6667 | 23.2680 | 106800 | 3.3436 |
- | 1.6851 | 23.3551 | 107200 | 3.3529 |
- | 1.692 | 23.4423 | 107600 | 3.3422 |
- | 1.6977 | 23.5294 | 108000 | 3.3397 |
- | 1.6977 | 23.6166 | 108400 | 3.3527 |
- | 1.6783 | 23.7037 | 108800 | 3.3499 |
- | 1.6655 | 23.7908 | 109200 | 3.3488 |
- | 1.699 | 23.8780 | 109600 | 3.3396 |
- | 1.6932 | 23.9651 | 110000 | 3.3543 |
- | 1.6932 | 24.0523 | 110400 | 3.3645 |
- | 1.6702 | 24.1394 | 110800 | 3.3517 |
- | 1.6763 | 24.2266 | 111200 | 3.3539 |
- | 1.6535 | 24.3137 | 111600 | 3.3473 |
- | 1.6847 | 24.4009 | 112000 | 3.3527 |
- | 1.6847 | 24.4880 | 112400 | 3.3483 |
- | 1.6862 | 24.5752 | 112800 | 3.3540 |
- | 1.6829 | 24.6623 | 113200 | 3.3592 |
- | 1.6698 | 24.7495 | 113600 | 3.3534 |
- | 1.671 | 24.8366 | 114000 | 3.3538 |
- | 1.671 | 24.9237 | 114400 | 3.3509 |
- | 1.6707 | 25.0109 | 114800 | 3.3563 |
- | 1.6657 | 25.0980 | 115200 | 3.3647 |
- | 1.6421 | 25.1852 | 115600 | 3.3676 |
- | 1.6474 | 25.2723 | 116000 | 3.3669 |
- | 1.6474 | 25.3595 | 116400 | 3.3651 |
- | 1.6575 | 25.4466 | 116800 | 3.3681 |
- | 1.657 | 25.5338 | 117200 | 3.3681 |
- | 1.6709 | 25.6209 | 117600 | 3.3647 |
- | 1.6699 | 25.7081 | 118000 | 3.3609 |
- | 1.6699 | 25.7952 | 118400 | 3.3599 |
- | 1.6567 | 25.8824 | 118800 | 3.3652 |
- | 1.6683 | 25.9695 | 119200 | 3.3619 |
- | 1.666 | 26.0566 | 119600 | 3.3621 |
- | 1.6508 | 26.1438 | 120000 | 3.3709 |
- | 1.6508 | 26.2309 | 120400 | 3.3687 |
- | 1.6385 | 26.3181 | 120800 | 3.3709 |
- | 1.6645 | 26.4052 | 121200 | 3.3708 |
- | 1.637 | 26.4924 | 121600 | 3.3776 |
- | 1.6475 | 26.5795 | 122000 | 3.3674 |
- | 1.6475 | 26.6667 | 122400 | 3.3709 |
- | 1.6593 | 26.7538 | 122800 | 3.3733 |
- | 1.6528 | 26.8410 | 123200 | 3.3718 |
- | 1.6492 | 26.9281 | 123600 | 3.3709 |
- | 1.6668 | 27.0153 | 124000 | 3.3713 |
- | 1.6668 | 27.1024 | 124400 | 3.3744 |
- | 1.6439 | 27.1895 | 124800 | 3.3748 |
- | 1.6504 | 27.2767 | 125200 | 3.3802 |
- | 1.6262 | 27.3638 | 125600 | 3.3787 |
- | 1.646 | 27.4510 | 126000 | 3.3718 |
- | 1.646 | 27.5381 | 126400 | 3.3762 |
- | 1.6387 | 27.6253 | 126800 | 3.3775 |
- | 1.6484 | 27.7124 | 127200 | 3.3759 |
- | 1.6446 | 27.7996 | 127600 | 3.3745 |
- | 1.6625 | 27.8867 | 128000 | 3.3716 |
- | 1.6625 | 27.9739 | 128400 | 3.3691 |
- | 1.6485 | 28.0610 | 128800 | 3.3810 |
- | 1.6339 | 28.1481 | 129200 | 3.3834 |
- | 1.6157 | 28.2353 | 129600 | 3.3818 |
- | 1.6309 | 28.3224 | 130000 | 3.3827 |
- | 1.6309 | 28.4096 | 130400 | 3.3793 |
- | 1.6439 | 28.4967 | 130800 | 3.3812 |
- | 1.6351 | 28.5839 | 131200 | 3.3806 |
- | 1.6375 | 28.6710 | 131600 | 3.3807 |
- | 1.6395 | 28.7582 | 132000 | 3.3780 |
- | 1.6395 | 28.8453 | 132400 | 3.3768 |
- | 1.6576 | 28.9325 | 132800 | 3.3756 |
- | 1.6431 | 29.0196 | 133200 | 3.3782 |
- | 1.6308 | 29.1068 | 133600 | 3.3782 |
- | 1.66 | 29.1939 | 134000 | 3.3769 |
- | 1.66 | 29.2810 | 134400 | 3.3817 |
- | 1.6188 | 29.3682 | 134800 | 3.3824 |
- | 1.6435 | 29.4553 | 135200 | 3.3831 |
- | 1.6169 | 29.5425 | 135600 | 3.3819 |
- | 1.6506 | 29.6296 | 136000 | 3.3788 |
- | 1.6506 | 29.7168 | 136400 | 3.3802 |
- | 1.6318 | 29.8039 | 136800 | 3.3806 |
- | 1.6155 | 29.8911 | 137200 | 3.3810 |
- | 1.6292 | 29.9782 | 137600 | 3.3813 |
-
-
- ### Framework versions
-
- - Transformers 4.40.0
- - Pytorch 2.3.1+cu118
- - Datasets 2.20.0
- - Tokenizers 0.19.1
 
+ ---
+ license: mit
+ base_model: cuba6112/orion
+ tags:
+ - generated_from_trainer
+ model-index:
+ - name: orion
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # orion
+
+ This model is a fine-tuned version of [cuba6112/orion](https://huggingface.co/cuba6112/orion) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 3.5251
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 2e-05
+ - train_batch_size: 8
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - lr_scheduler_warmup_steps: 500
+ - num_epochs: 3
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss |
+ |:-------------:|:------:|:-----:|:---------------:|
+ | No log | 0.0871 | 400 | 3.5515 |
+ | 1.4611 | 0.1743 | 800 | 3.6262 |
+ | 1.174 | 0.2614 | 1200 | 3.6164 |
+ | 1.2082 | 0.3486 | 1600 | 3.6236 |
+ | 1.4301 | 0.4357 | 2000 | 3.5137 |
+ | 1.4301 | 0.5229 | 2400 | 3.5171 |
+ | 1.4987 | 0.6100 | 2800 | 3.5004 |
+ | 1.518 | 0.6972 | 3200 | 3.4667 |
+ | 1.5859 | 0.7843 | 3600 | 3.4521 |
+ | 1.6333 | 0.8715 | 4000 | 3.4452 |
+ | 1.6333 | 0.9586 | 4400 | 3.4300 |
+ | 1.6698 | 1.0458 | 4800 | 3.5143 |
+ | 1.4993 | 1.1329 | 5200 | 3.5234 |
+ | 1.4858 | 1.2200 | 5600 | 3.5240 |
+ | 1.4804 | 1.3072 | 6000 | 3.4979 |
+ | 1.4804 | 1.3943 | 6400 | 3.5131 |
+ | 1.4814 | 1.4815 | 6800 | 3.5177 |
+ | 1.478 | 1.5686 | 7200 | 3.4989 |
+ | 1.5073 | 1.6558 | 7600 | 3.5158 |
+ | 1.4952 | 1.7429 | 8000 | 3.5145 |
+ | 1.4952 | 1.8301 | 8400 | 3.4975 |
+ | 1.5367 | 1.9172 | 8800 | 3.5075 |
+ | 1.5085 | 2.0044 | 9200 | 3.5058 |
+ | 1.462 | 2.0915 | 9600 | 3.5352 |
+ | 1.4378 | 2.1786 | 10000 | 3.5335 |
+ | 1.4378 | 2.2658 | 10400 | 3.5378 |
+ | 1.4514 | 2.3529 | 10800 | 3.5383 |
+ | 1.448 | 2.4401 | 11200 | 3.5369 |
+ | 1.46 | 2.5272 | 11600 | 3.5361 |
+ | 1.4722 | 2.6144 | 12000 | 3.5337 |
+ | 1.4722 | 2.7015 | 12400 | 3.5277 |
+ | 1.4726 | 2.7887 | 12800 | 3.5284 |
+ | 1.4829 | 2.8758 | 13200 | 3.5257 |
+ | 1.4963 | 2.9630 | 13600 | 3.5253 |
+
+
+ ### Framework versions
+
+ - Transformers 4.41.2
+ - Pytorch 2.3.0+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
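
The hyperparameter list above maps one-to-one onto `TrainingArguments` (the stated Adam betas and epsilon are the library defaults). Below is a minimal sketch of a comparable run, not the author's actual script: it assumes the standard Hugging Face `Trainer`, the Wikitext-2 corpus named in the commit message (the card itself still says "unknown dataset"), and plain `gpt2` weights as the starting point, since the card's `base_model` field points back at this repo.

```python
# Sketch of a comparable fine-tuning run under the assumptions stated above.
from datasets import load_dataset
from transformers import (AutoModelForCausalLM, AutoTokenizer,
                          DataCollatorForLanguageModeling, Trainer,
                          TrainingArguments)

tokenizer = AutoTokenizer.from_pretrained("gpt2")
tokenizer.pad_token = tokenizer.eos_token  # GPT-2 ships without a pad token
model = AutoModelForCausalLM.from_pretrained("gpt2")

raw = load_dataset("wikitext", "wikitext-2-raw-v1")
ds = raw.map(lambda b: tokenizer(b["text"], truncation=True, max_length=512),
             batched=True, remove_columns=["text"])
ds = ds.filter(lambda ex: len(ex["input_ids"]) > 0)  # drop empty lines

args = TrainingArguments(
    output_dir="orion",
    learning_rate=2e-5,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=3,
    fp16=True,                    # "Native AMP" mixed precision
    evaluation_strategy="steps",  # the results table evaluates every 400 steps
    eval_steps=400,
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=ds["train"],
    eval_dataset=ds["validation"],
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()
```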
generation_config.json CHANGED
@@ -1,6 +1,6 @@
- {
-   "_from_model_config": true,
-   "bos_token_id": 50256,
-   "eos_token_id": 50256,
-   "transformers_version": "4.40.0"
- }
+ {
+   "_from_model_config": true,
+   "bos_token_id": 50256,
+   "eos_token_id": 50256,
+   "transformers_version": "4.41.2"
+ }
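
The only substantive change here is the `transformers_version` stamp; both special-token IDs remain GPT-2's `<|endoftext|>` (50256). A minimal inference sketch against the published checkpoint follows; the prompt and sampling settings are illustrative, not part of the commit.

```python
# Sketch: load the checkpoint and generate with the hub-side defaults above.
from transformers import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

tokenizer = AutoTokenizer.from_pretrained("cuba6112/orion")
model = AutoModelForCausalLM.from_pretrained("cuba6112/orion")

gen_cfg = GenerationConfig.from_pretrained("cuba6112/orion")
assert gen_cfg.bos_token_id == gen_cfg.eos_token_id == 50256  # per the JSON above

inputs = tokenizer("The history of the encyclopedia", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=40, do_sample=True, top_p=0.9)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```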
merges.txt CHANGED
The diff for this file is too large to render. See raw diff
 
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:45940a637bca3545463909a730eee20e28238293e9e99ac77590cab0d3eebdb5
+ oid sha256:37182c387fdc0f5b88c3edb15ec206609aa661d16e56f429a69ebe362d66ddb2
  size 497774208
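
What changes here is the Git LFS pointer rather than a rendered diff of the weights: `oid` is the SHA-256 of the actual ~498 MB safetensors file, `size` its byte count. A sketch for checking a downloaded copy against the new pointer (the local path is an assumption):

```python
# Sketch: verify a downloaded weight file against the "+" side of the pointer.
import hashlib

EXPECTED = "37182c387fdc0f5b88c3edb15ec206609aa661d16e56f429a69ebe362d66ddb2"

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file in 1 MiB chunks and return its SHA-256 hex digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

assert sha256_of("model.safetensors") == EXPECTED, "download is corrupt or stale"
```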
runs/Jul03_12-04-17_7de6f6a510b5/events.out.tfevents.1720008258.7de6f6a510b5.789.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:02ef121632e1258088ecccfbbd4f7d6ad48f4484862af647335f8a19a8468ca9
- size 19995
+ oid sha256:04137f33011670e7b9adff652ccc477c19a4252b60a3d256f2e5146057791403
+ size 20349
runs/Jul03_12-04-17_7de6f6a510b5/events.out.tfevents.1720010718.7de6f6a510b5.789.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d601ce2c510d89cfc557feaa4417643ad76a6fb4d7dcd012e67614d2ba3a7d8
+ size 359
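
These are TensorBoard event files: the main log grows slightly (19995 to 20349 bytes) and a small follow-up file is added, consistent with the shorter 3-epoch run. A sketch for reading the logged scalars back out (requires the `tensorboard` package; the `eval/loss` tag name follows the usual Trainer convention and is an assumption, not read from the commit):

```python
# Sketch: load every events.out.tfevents.* under the run directory above.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jul03_12-04-17_7de6f6a510b5")
acc.Reload()
print(acc.Tags()["scalars"])          # available scalar tags
for event in acc.Scalars("eval/loss"):
    print(event.step, event.value)    # should track the validation-loss table
```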
special_tokens_map.json CHANGED
@@ -1,24 +1,24 @@
- {
-   "bos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "eos_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": "<|endoftext|>",
-   "unk_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": true,
-     "rstrip": false,
-     "single_word": false
-   }
- }
+ {
+   "bos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<|endoftext|>",
+   "unk_token": {
+     "content": "<|endoftext|>",
+     "lstrip": false,
+     "normalized": true,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer_config.json CHANGED
@@ -1,22 +1,22 @@
- {
-   "add_bos_token": false,
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "50256": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": true,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "bos_token": "<|endoftext|>",
-   "clean_up_tokenization_spaces": true,
-   "eos_token": "<|endoftext|>",
-   "errors": "replace",
-   "model_max_length": 1024,
-   "pad_token": "<|endoftext|>",
-   "tokenizer_class": "GPT2Tokenizer",
-   "unk_token": "<|endoftext|>"
- }
+ {
+   "add_bos_token": false,
+   "add_prefix_space": false,
+   "added_tokens_decoder": {
+     "50256": {
+       "content": "<|endoftext|>",
+       "lstrip": false,
+       "normalized": true,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<|endoftext|>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<|endoftext|>",
+   "errors": "replace",
+   "model_max_length": 1024,
+   "pad_token": "<|endoftext|>",
+   "tokenizer_class": "GPT2Tokenizer",
+   "unk_token": "<|endoftext|>"
+ }
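
Both tokenizer files are rewritten with byte-identical content: a standard GPT-2 tokenizer whose bos, eos, pad, and unk roles all map to the single `<|endoftext|>` token (id 50256), with a 1024-token context window. A quick sketch to confirm the loaded tokenizer matches:

```python
# Sketch: check the loaded tokenizer against the two configs above.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("cuba6112/orion")
assert (tok.bos_token == tok.eos_token == tok.pad_token
        == tok.unk_token == "<|endoftext|>")
assert tok.convert_tokens_to_ids("<|endoftext|>") == 50256
print(tok.model_max_length)  # 1024
```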
vocab.json CHANGED
The diff for this file is too large to render. See raw diff