|
{ |
|
"best_metric": 1.6559995412826538, |
|
"best_model_checkpoint": "./Biggie-SmoLlm-0.15B-AdamW/checkpoint-1000", |
|
"epoch": 3.0, |
|
"eval_steps": 200, |
|
"global_step": 1017, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0029498525073746312, |
|
"grad_norm": 2.4375, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 2.1014, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0058997050147492625, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 2.1526, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.008849557522123894, |
|
"grad_norm": 2.96875, |
|
"learning_rate": 2e-05, |
|
"loss": 2.0517, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.011799410029498525, |
|
"grad_norm": 2.671875, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 1.9602, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.014749262536873156, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.9619, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.017699115044247787, |
|
"grad_norm": 2.21875, |
|
"learning_rate": 4e-05, |
|
"loss": 1.9421, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.02064896755162242, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 2.0735, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.02359882005899705, |
|
"grad_norm": 2.390625, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 2.1505, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.02654867256637168, |
|
"grad_norm": 2.375, |
|
"learning_rate": 6e-05, |
|
"loss": 2.0444, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.029498525073746312, |
|
"grad_norm": 2.5625, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 1.9299, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.032448377581120944, |
|
"grad_norm": 2.3125, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 2.2301, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 8e-05, |
|
"loss": 2.0655, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.038348082595870206, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 2.0205, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.04129793510324484, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 2.0813, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.04424778761061947, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 0.0001, |
|
"loss": 2.0, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.0471976401179941, |
|
"grad_norm": 1.984375, |
|
"learning_rate": 0.00010666666666666667, |
|
"loss": 2.0752, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.05014749262536873, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 0.00011333333333333334, |
|
"loss": 2.0274, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.05309734513274336, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 0.00012, |
|
"loss": 2.0038, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.05604719764011799, |
|
"grad_norm": 1.671875, |
|
"learning_rate": 0.00012666666666666666, |
|
"loss": 1.8893, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.058997050147492625, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 0.00013333333333333334, |
|
"loss": 1.9792, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.061946902654867256, |
|
"grad_norm": 1.5, |
|
"learning_rate": 0.00014, |
|
"loss": 1.9674, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.06489675516224189, |
|
"grad_norm": 1.28125, |
|
"learning_rate": 0.00014666666666666666, |
|
"loss": 1.926, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.06784660766961652, |
|
"grad_norm": 1.2109375, |
|
"learning_rate": 0.00015333333333333334, |
|
"loss": 1.9141, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"grad_norm": 1.375, |
|
"learning_rate": 0.00016, |
|
"loss": 1.7923, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.07374631268436578, |
|
"grad_norm": 1.0, |
|
"learning_rate": 0.0001666666666666667, |
|
"loss": 2.0394, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.07669616519174041, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 0.00017333333333333334, |
|
"loss": 1.8264, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.07964601769911504, |
|
"grad_norm": 0.76953125, |
|
"learning_rate": 0.00018, |
|
"loss": 1.8675, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.08259587020648967, |
|
"grad_norm": 0.796875, |
|
"learning_rate": 0.0001866666666666667, |
|
"loss": 1.9696, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.0855457227138643, |
|
"grad_norm": 0.7578125, |
|
"learning_rate": 0.00019333333333333333, |
|
"loss": 2.0026, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.08849557522123894, |
|
"grad_norm": 0.8359375, |
|
"learning_rate": 0.0002, |
|
"loss": 2.1422, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09144542772861357, |
|
"grad_norm": 0.67578125, |
|
"learning_rate": 0.00019999949343511917, |
|
"loss": 1.9063, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.0943952802359882, |
|
"grad_norm": 0.65625, |
|
"learning_rate": 0.00019999797374560874, |
|
"loss": 1.9048, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.09734513274336283, |
|
"grad_norm": 0.5703125, |
|
"learning_rate": 0.0001999954409468652, |
|
"loss": 1.829, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.10029498525073746, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.00019999189506454904, |
|
"loss": 1.8491, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.10324483775811209, |
|
"grad_norm": 0.56640625, |
|
"learning_rate": 0.0001999873361345847, |
|
"loss": 1.91, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019998176420316002, |
|
"loss": 1.8505, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.10914454277286136, |
|
"grad_norm": 0.53515625, |
|
"learning_rate": 0.0001999751793267259, |
|
"loss": 1.8559, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.11209439528023599, |
|
"grad_norm": 0.5625, |
|
"learning_rate": 0.00019996758157199573, |
|
"loss": 1.9353, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.11504424778761062, |
|
"grad_norm": 0.546875, |
|
"learning_rate": 0.00019995897101594454, |
|
"loss": 1.8246, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.11799410029498525, |
|
"grad_norm": 0.47265625, |
|
"learning_rate": 0.00019994934774580851, |
|
"loss": 1.8832, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.12094395280235988, |
|
"grad_norm": 0.50390625, |
|
"learning_rate": 0.00019993871185908381, |
|
"loss": 1.856, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.12389380530973451, |
|
"grad_norm": 0.48046875, |
|
"learning_rate": 0.00019992706346352577, |
|
"loss": 2.079, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.12684365781710916, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00019991440267714782, |
|
"loss": 1.8273, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.12979351032448377, |
|
"grad_norm": 0.474609375, |
|
"learning_rate": 0.00019990072962822007, |
|
"loss": 1.8057, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.13274336283185842, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.00019988604445526827, |
|
"loss": 1.9154, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.13569321533923304, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00019987034730707234, |
|
"loss": 1.7652, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.13864306784660768, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.0001998536383426647, |
|
"loss": 1.718, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00019983591773132882, |
|
"loss": 1.9797, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.14454277286135694, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00019981718565259752, |
|
"loss": 1.7253, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.14749262536873156, |
|
"grad_norm": 0.5234375, |
|
"learning_rate": 0.0001997974422962511, |
|
"loss": 1.8606, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.1504424778761062, |
|
"grad_norm": 0.515625, |
|
"learning_rate": 0.00019977668786231534, |
|
"loss": 1.7493, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.15339233038348082, |
|
"grad_norm": 0.43359375, |
|
"learning_rate": 0.00019975492256105957, |
|
"loss": 1.7383, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.15634218289085547, |
|
"grad_norm": 0.453125, |
|
"learning_rate": 0.00019973214661299455, |
|
"loss": 1.766, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.1592920353982301, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.0001997083602488702, |
|
"loss": 1.7996, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.16224188790560473, |
|
"grad_norm": 0.4296875, |
|
"learning_rate": 0.00019968356370967327, |
|
"loss": 1.7388, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.16519174041297935, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00019965775724662484, |
|
"loss": 1.7388, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.168141592920354, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00019963094112117785, |
|
"loss": 1.8541, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.1710914454277286, |
|
"grad_norm": 0.435546875, |
|
"learning_rate": 0.00019960311560501454, |
|
"loss": 1.7913, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.17404129793510326, |
|
"grad_norm": 0.4609375, |
|
"learning_rate": 0.00019957428098004343, |
|
"loss": 1.834, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00019954443753839667, |
|
"loss": 1.768, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.17994100294985252, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00019951358558242707, |
|
"loss": 1.9784, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.18289085545722714, |
|
"grad_norm": 0.4453125, |
|
"learning_rate": 0.000199481725424705, |
|
"loss": 1.9886, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.18584070796460178, |
|
"grad_norm": 0.400390625, |
|
"learning_rate": 0.00019944885738801518, |
|
"loss": 1.7472, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.1887905604719764, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019941498180535346, |
|
"loss": 1.7831, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.19174041297935104, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.0001993800990199235, |
|
"loss": 1.8306, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.19469026548672566, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019934420938513313, |
|
"loss": 1.7213, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.1976401179941003, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019930731326459088, |
|
"loss": 1.613, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.20058997050147492, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019926941103210247, |
|
"loss": 1.8055, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.20353982300884957, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019923050307166655, |
|
"loss": 1.6326, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.20648967551622419, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019919058977747135, |
|
"loss": 1.9313, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.20943952802359883, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00019914967155389027, |
|
"loss": 1.8475, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"grad_norm": 0.4375, |
|
"learning_rate": 0.000199107748815478, |
|
"loss": 1.9055, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.2153392330383481, |
|
"grad_norm": 0.4140625, |
|
"learning_rate": 0.00019906482198696636, |
|
"loss": 1.7846, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.2182890855457227, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019902089150325978, |
|
"loss": 1.8251, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.22123893805309736, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019897595780943102, |
|
"loss": 1.8831, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.22418879056047197, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0001989300213607168, |
|
"loss": 1.707, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.22713864306784662, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019888308262251285, |
|
"loss": 1.7173, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.23008849557522124, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00019883514207036956, |
|
"loss": 1.736, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.23303834808259588, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019878620018998696, |
|
"loss": 1.6251, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.2359882005899705, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019873625747720972, |
|
"loss": 1.7156, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.23893805309734514, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.0001986853144380224, |
|
"loss": 1.6135, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.24188790560471976, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.00019863337158854404, |
|
"loss": 2.0822, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.2448377581120944, |
|
"grad_norm": 0.396484375, |
|
"learning_rate": 0.00019858042945502318, |
|
"loss": 1.7756, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.24778761061946902, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00019852648857383222, |
|
"loss": 1.7763, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.25073746312684364, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019847154949146237, |
|
"loss": 1.7516, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.2536873156342183, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001984156127645178, |
|
"loss": 1.6647, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.25663716814159293, |
|
"grad_norm": 0.42578125, |
|
"learning_rate": 0.00019835867895971014, |
|
"loss": 1.9323, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.25958702064896755, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019830074865385272, |
|
"loss": 1.6946, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.26253687315634217, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019824182243385465, |
|
"loss": 1.5654, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.26548672566371684, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019818190089671508, |
|
"loss": 1.8147, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.26843657817109146, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001981209846495169, |
|
"loss": 1.671, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.2713864306784661, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019805907430942075, |
|
"loss": 1.7172, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.2743362831858407, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001979961705036587, |
|
"loss": 1.814, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.27728613569321536, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019793227386952794, |
|
"loss": 1.6489, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.28023598820059, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.00019786738505438427, |
|
"loss": 1.7909, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.2831858407079646, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00019780150471563558, |
|
"loss": 1.7049, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.2861356932153392, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019773463352073525, |
|
"loss": 1.472, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.2890855457227139, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001976667721471752, |
|
"loss": 1.7388, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.2920353982300885, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.00019759792128247922, |
|
"loss": 1.7382, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.2949852507374631, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001975280816241959, |
|
"loss": 1.8477, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.29793510324483774, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019745725387989164, |
|
"loss": 1.8796, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.3008849557522124, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019738543876714334, |
|
"loss": 1.814, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.30383480825958703, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 0.00019731263701353133, |
|
"loss": 1.8508, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.30678466076696165, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.00019723884935663182, |
|
"loss": 1.6524, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.30973451327433627, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019716407654400952, |
|
"loss": 1.6972, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.31268436578171094, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.00019708831933321004, |
|
"loss": 1.8181, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.31563421828908556, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00019701157849175228, |
|
"loss": 1.7699, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.3185840707964602, |
|
"grad_norm": 0.39453125, |
|
"learning_rate": 0.00019693385479712048, |
|
"loss": 1.5722, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.3215339233038348, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00019685514903675655, |
|
"loss": 1.6242, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.32448377581120946, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00019677546200805196, |
|
"loss": 1.6736, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.3274336283185841, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019669479451833976, |
|
"loss": 1.6835, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.3303834808259587, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00019661314738488627, |
|
"loss": 1.7878, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.3333333333333333, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00019653052143488287, |
|
"loss": 1.7273, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.336283185840708, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019644691750543767, |
|
"loss": 1.8688, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.3392330383480826, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019636233644356696, |
|
"loss": 1.8902, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.3421828908554572, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00019627677910618666, |
|
"loss": 1.8055, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.34513274336283184, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019619024636010363, |
|
"loss": 1.6037, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.3480825958702065, |
|
"grad_norm": 0.41796875, |
|
"learning_rate": 0.00019610273908200683, |
|
"loss": 1.7333, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.35103244837758113, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019601425815845858, |
|
"loss": 1.7557, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.35398230088495575, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019592480448588542, |
|
"loss": 1.7445, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.35693215339233036, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019583437897056915, |
|
"loss": 1.6733, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.35988200589970504, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019574298252863758, |
|
"loss": 1.7736, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.36283185840707965, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019565061608605526, |
|
"loss": 1.8282, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.36578171091445427, |
|
"grad_norm": 0.408203125, |
|
"learning_rate": 0.0001955572805786141, |
|
"loss": 1.8028, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.3687315634218289, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019546297695192388, |
|
"loss": 1.965, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.37168141592920356, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00019536770616140276, |
|
"loss": 1.7437, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.3746312684365782, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00019527146917226745, |
|
"loss": 1.8328, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.3775811209439528, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00019517426695952358, |
|
"loss": 1.721, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.3805309734513274, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019507610050795558, |
|
"loss": 2.0209, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.3834808259587021, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019497697081211708, |
|
"loss": 1.7349, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.3864306784660767, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00019487687887632048, |
|
"loss": 1.7402, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.3893805309734513, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00019477582571462705, |
|
"loss": 1.7905, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.39233038348082594, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00019467381235083634, |
|
"loss": 1.6632, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.3952802359882006, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00019457083981847616, |
|
"loss": 1.6401, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.39823008849557523, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001944669091607919, |
|
"loss": 1.7757, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.40117994100294985, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0001943620214307359, |
|
"loss": 1.6577, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.40412979351032446, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.0001942561776909571, |
|
"loss": 1.8717, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.40707964601769914, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019414937901378982, |
|
"loss": 1.7336, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.41002949852507375, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001940416264812433, |
|
"loss": 1.7332, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.41297935103244837, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00019393292118499044, |
|
"loss": 1.7791, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.415929203539823, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019382326422635705, |
|
"loss": 1.9225, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.41887905604719766, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00019371265671631037, |
|
"loss": 1.6715, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.4218289085545723, |
|
"grad_norm": 0.431640625, |
|
"learning_rate": 0.00019360109977544795, |
|
"loss": 1.8205, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.4247787610619469, |
|
"grad_norm": 0.38671875, |
|
"learning_rate": 0.00019348859453398646, |
|
"loss": 1.5496, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.4277286135693215, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019337514213174992, |
|
"loss": 1.9299, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.4306784660766962, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0001932607437181584, |
|
"loss": 1.6924, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.4336283185840708, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019314540045221626, |
|
"loss": 1.7083, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.4365781710914454, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001930291135025005, |
|
"loss": 1.6546, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.43952802359882004, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.00019291188404714878, |
|
"loss": 1.8071, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.4424778761061947, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.0001927937132738476, |
|
"loss": 1.7596, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.44542772861356933, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.0001926746023798202, |
|
"loss": 1.6805, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.44837758112094395, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00019255455257181456, |
|
"loss": 1.6037, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.45132743362831856, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.000192433565066091, |
|
"loss": 1.7223, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.45427728613569324, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019231164108840995, |
|
"loss": 1.8775, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.45722713864306785, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00019218878187401948, |
|
"loss": 1.7495, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.46017699115044247, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00019206498866764288, |
|
"loss": 1.6175, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.4631268436578171, |
|
"grad_norm": 0.375, |
|
"learning_rate": 0.00019194026272346596, |
|
"loss": 1.6249, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.46607669616519176, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00019181460530512441, |
|
"loss": 1.5974, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.4690265486725664, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001916880176856909, |
|
"loss": 1.7076, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.471976401179941, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.0001915605011476623, |
|
"loss": 1.633, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.4749262536873156, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001914320569829466, |
|
"loss": 1.7677, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.4778761061946903, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.0001913026864928498, |
|
"loss": 1.6533, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.4808259587020649, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00019117239098806295, |
|
"loss": 1.8283, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.4837758112094395, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00019104117178864852, |
|
"loss": 1.5265, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.48672566371681414, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00019090903022402729, |
|
"loss": 1.6958, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.4896755162241888, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.00019077596763296474, |
|
"loss": 1.7384, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.49262536873156343, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00019064198536355761, |
|
"loss": 1.6487, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.49557522123893805, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00019050708477322018, |
|
"loss": 1.9019, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.49852507374631266, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00019037126722867045, |
|
"loss": 1.856, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.5014749262536873, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00019023453410591635, |
|
"loss": 1.602, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.504424778761062, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.0001900968867902419, |
|
"loss": 1.7144, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.5073746312684366, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.000189958326676193, |
|
"loss": 1.664, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.5103244837758112, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001898188551675634, |
|
"loss": 1.818, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.5132743362831859, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018967847367738048, |
|
"loss": 1.6334, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.5162241887905604, |
|
"grad_norm": 0.52734375, |
|
"learning_rate": 0.00018953718362789085, |
|
"loss": 1.6842, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.5191740412979351, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001893949864505461, |
|
"loss": 1.8626, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.5221238938053098, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 0.00018925188358598813, |
|
"loss": 1.718, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.5250737463126843, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00018910787648403465, |
|
"loss": 1.5932, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.528023598820059, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00018896296660366447, |
|
"loss": 1.7042, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.5309734513274337, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00018881715541300276, |
|
"loss": 1.7268, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.5339233038348082, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.000188670444389306, |
|
"loss": 1.5908, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.5368731563421829, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00018852283501894732, |
|
"loss": 1.5886, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.5398230088495575, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00018837432879740114, |
|
"loss": 1.7165, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.5427728613569321, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.0001882249272292282, |
|
"loss": 1.8307, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.5457227138643068, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001880746318280602, |
|
"loss": 1.6305, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.5486725663716814, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00018792344411658468, |
|
"loss": 1.5587, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.551622418879056, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00018777136562652929, |
|
"loss": 1.5655, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.5545722713864307, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018761839789864645, |
|
"loss": 1.8039, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.5575221238938053, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00018746454248269777, |
|
"loss": 1.6224, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.56047197640118, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018730980093743823, |
|
"loss": 1.6452, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.5634218289085545, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0001871541748306005, |
|
"loss": 1.7146, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.5663716814159292, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 0.000186997665738879, |
|
"loss": 1.6701, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.5693215339233039, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00018684027524791386, |
|
"loss": 1.6254, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.5722713864306784, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00018668200495227505, |
|
"loss": 1.7132, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.5752212389380531, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00018652285645544603, |
|
"loss": 1.6175, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.5781710914454278, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00018636283136980758, |
|
"loss": 1.7819, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.5811209439528023, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001862019313166214, |
|
"loss": 1.7777, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.584070796460177, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00018604015792601396, |
|
"loss": 1.6488, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.5870206489675516, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018587751283695949, |
|
"loss": 1.6517, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.5899705014749262, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00018571399769726386, |
|
"loss": 1.8747, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5899705014749262, |
|
"eval_loss": 1.701263189315796, |
|
"eval_runtime": 31.6775, |
|
"eval_samples_per_second": 31.568, |
|
"eval_steps_per_second": 3.946, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.5929203539823009, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001855496141635476, |
|
"loss": 1.6067, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.5958702064896755, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.0001853843639012292, |
|
"loss": 1.7201, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.5988200589970502, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00018521824858450827, |
|
"loss": 1.7772, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.6017699115044248, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 0.0001850512698963485, |
|
"loss": 1.7896, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.6047197640117994, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00018488342952846073, |
|
"loss": 1.5824, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.6076696165191741, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00018471472918128563, |
|
"loss": 1.6861, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.6106194690265486, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018454517056397661, |
|
"loss": 1.7572, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.6135693215339233, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00018437475539438255, |
|
"loss": 1.9539, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.616519174041298, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00018420348539903019, |
|
"loss": 1.657, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.6194690265486725, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00018403136231310684, |
|
"loss": 1.5934, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.6224188790560472, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00018385838788044273, |
|
"loss": 1.7199, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.6253687315634219, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.00018368456385349334, |
|
"loss": 1.5833, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.6283185840707964, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.00018350989199332154, |
|
"loss": 1.6056, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.6312684365781711, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00018333437406957995, |
|
"loss": 1.8369, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.6342182890855457, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.000183158011860493, |
|
"loss": 1.7488, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.6371681415929203, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00018298080715283858, |
|
"loss": 1.7316, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.640117994100295, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.0001828027617419304, |
|
"loss": 1.641, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.6430678466076696, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.0001826238774315995, |
|
"loss": 1.7285, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.6460176991150443, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00018244415603417603, |
|
"loss": 1.5975, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.6489675516224189, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.000182263599370471, |
|
"loss": 1.7427, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.6519174041297935, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0001820822092697577, |
|
"loss": 1.8156, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.6548672566371682, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018189998756975318, |
|
"loss": 1.7881, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.6578171091445427, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00018171693611659977, |
|
"loss": 1.8643, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.6607669616519174, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00018153305676484619, |
|
"loss": 1.7019, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.6637168141592921, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0001813483513774289, |
|
"loss": 1.7559, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00018116282182565311, |
|
"loss": 1.7332, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.6696165191740413, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00018097646998917397, |
|
"loss": 1.6294, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.672566371681416, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.0001807892977559774, |
|
"loss": 1.671, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.6755162241887905, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00018060130702236097, |
|
"loss": 1.7488, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.6784660766961652, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.00018041249969291475, |
|
"loss": 1.7029, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.6814159292035398, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00018022287768050202, |
|
"loss": 1.6486, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.6843657817109144, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.0001800324429062398, |
|
"loss": 1.6692, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.6873156342182891, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00017984119729947944, |
|
"loss": 1.64, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.6902654867256637, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00017964914279778715, |
|
"loss": 1.714, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.6932153392330384, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00017945628134692423, |
|
"loss": 1.6772, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.696165191740413, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.0001792626149008274, |
|
"loss": 1.5846, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.6991150442477876, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.0001790681454215891, |
|
"loss": 1.5758, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.7020648967551623, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00017887287487943754, |
|
"loss": 1.6947, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.7050147492625368, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00017867680525271662, |
|
"loss": 1.9267, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.7079646017699115, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001784799385278661, |
|
"loss": 1.7433, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.7109144542772862, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.0001782822766994014, |
|
"loss": 1.8449, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.7138643067846607, |
|
"grad_norm": 0.404296875, |
|
"learning_rate": 0.0001780838217698933, |
|
"loss": 1.7898, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.7168141592920354, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00017788457574994778, |
|
"loss": 1.6149, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.7197640117994101, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00017768454065818551, |
|
"loss": 1.7006, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.7227138643067846, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.00017748371852122164, |
|
"loss": 1.7115, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.7256637168141593, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00017728211137364489, |
|
"loss": 1.6251, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.7286135693215339, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00017707972125799735, |
|
"loss": 2.012, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.7315634218289085, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.0001768765502247535, |
|
"loss": 1.744, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.7345132743362832, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00017667260033229953, |
|
"loss": 1.8532, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.7374631268436578, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00017646787364691252, |
|
"loss": 1.483, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.7404129793510325, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00017626237224273945, |
|
"loss": 1.7557, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.7433628318584071, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00017605609820177617, |
|
"loss": 1.8172, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.7463126843657817, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00017584905361384644, |
|
"loss": 1.7924, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.7492625368731564, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00017564124057658056, |
|
"loss": 1.6829, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.7522123893805309, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00017543266119539422, |
|
"loss": 1.6772, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.7551622418879056, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.0001752233175834673, |
|
"loss": 1.7583, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.7581120943952803, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.00017501321186172216, |
|
"loss": 1.533, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.7610619469026548, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 0.00017480234615880247, |
|
"loss": 1.6097, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.7640117994100295, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00017459072261105137, |
|
"loss": 1.7074, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.7669616519174042, |
|
"grad_norm": 0.412109375, |
|
"learning_rate": 0.00017437834336249001, |
|
"loss": 1.6533, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.7699115044247787, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00017416521056479577, |
|
"loss": 1.706, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.7728613569321534, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00017395132637728047, |
|
"loss": 1.6761, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.775811209439528, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.00017373669296686842, |
|
"loss": 1.7223, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.7787610619469026, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00017352131250807467, |
|
"loss": 1.6559, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.7817109144542773, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00017330518718298264, |
|
"loss": 1.6263, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.7846607669616519, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00017308831918122242, |
|
"loss": 1.6628, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.7876106194690266, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.0001728707106999482, |
|
"loss": 1.7917, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.7905604719764012, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 0.00017265236394381633, |
|
"loss": 1.7092, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.7935103244837758, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00017243328112496277, |
|
"loss": 1.4767, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.7964601769911505, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.0001722134644629807, |
|
"loss": 1.6683, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.799410029498525, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 0.0001719929161848982, |
|
"loss": 1.7758, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.8023598820058997, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00017177163852515553, |
|
"loss": 1.8755, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.8053097345132744, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00017154963372558246, |
|
"loss": 1.8143, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.8082595870206489, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.0001713269040353757, |
|
"loss": 1.9027, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.8112094395280236, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 0.0001711034517110761, |
|
"loss": 1.635, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.8141592920353983, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00017087927901654557, |
|
"loss": 1.7577, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.8171091445427728, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00017065438822294447, |
|
"loss": 1.5825, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.8200589970501475, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00017042878160870829, |
|
"loss": 1.8417, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.8230088495575221, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00017020246145952477, |
|
"loss": 1.7256, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.8259587020648967, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.00016997543006831082, |
|
"loss": 1.6442, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.8289085545722714, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.00016974768973518893, |
|
"loss": 1.6599, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.831858407079646, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.00016951924276746425, |
|
"loss": 1.5897, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.8348082595870207, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00016929009147960094, |
|
"loss": 1.9082, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.8377581120943953, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00016906023819319893, |
|
"loss": 1.6846, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.8407079646017699, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00016882968523697028, |
|
"loss": 1.6893, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.8436578171091446, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.0001685984349467156, |
|
"loss": 1.6329, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.8466076696165191, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001683664896653004, |
|
"loss": 1.6538, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.8495575221238938, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00016813385174263137, |
|
"loss": 1.8233, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.8525073746312685, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00016790052353563253, |
|
"loss": 1.5222, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.855457227138643, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00016766650740822136, |
|
"loss": 1.7913, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.8584070796460177, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00016743180573128495, |
|
"loss": 1.6088, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.8613569321533924, |
|
"grad_norm": 0.427734375, |
|
"learning_rate": 0.00016719642088265578, |
|
"loss": 1.6839, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.8643067846607669, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.0001669603552470879, |
|
"loss": 1.6752, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.8672566371681416, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00016672361121623238, |
|
"loss": 1.7121, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.8702064896755162, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.0001664861911886136, |
|
"loss": 1.6364, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.8731563421828908, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00016624809756960444, |
|
"loss": 1.7002, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.8761061946902655, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0001660093327714022, |
|
"loss": 1.797, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.8790560471976401, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00016576989921300418, |
|
"loss": 1.8461, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.8820058997050148, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00016552979932018297, |
|
"loss": 1.6665, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.8849557522123894, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00016528903552546207, |
|
"loss": 1.6472, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.887905604719764, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.0001650476102680911, |
|
"loss": 1.6908, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.8908554572271387, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.0001648055259940212, |
|
"loss": 1.6468, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.8938053097345132, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00016456278515588024, |
|
"loss": 1.6656, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.8967551622418879, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00016431939021294787, |
|
"loss": 1.7536, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.8997050147492626, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00016407534363113073, |
|
"loss": 1.7766, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.9026548672566371, |
|
"grad_norm": 0.45703125, |
|
"learning_rate": 0.0001638306478829373, |
|
"loss": 1.6868, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.9056047197640118, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00016358530544745308, |
|
"loss": 1.6418, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.9085545722713865, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00016333931881031528, |
|
"loss": 1.6935, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.911504424778761, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00016309269046368776, |
|
"loss": 1.6796, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.9144542772861357, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00016284542290623567, |
|
"loss": 1.5933, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.9174041297935103, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00016259751864310026, |
|
"loss": 1.587, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.9203539823008849, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 0.00016234898018587337, |
|
"loss": 1.7507, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.9233038348082596, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00016209981005257208, |
|
"loss": 1.7791, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.9262536873156342, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0001618500107676132, |
|
"loss": 1.7078, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.9292035398230089, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.0001615995848617876, |
|
"loss": 1.7454, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.9321533923303835, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00016134853487223465, |
|
"loss": 1.7158, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.9351032448377581, |
|
"grad_norm": 0.37109375, |
|
"learning_rate": 0.00016109686334241655, |
|
"loss": 1.6761, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.9380530973451328, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00016084457282209243, |
|
"loss": 1.7169, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.9410029498525073, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.0001605916658672927, |
|
"loss": 1.6098, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.943952802359882, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00016033814504029292, |
|
"loss": 1.6303, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.9469026548672567, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 0.00016008401290958807, |
|
"loss": 1.6885, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.9498525073746312, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.0001598292720498664, |
|
"loss": 1.7594, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.9528023598820059, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00015957392504198337, |
|
"loss": 1.8732, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.9557522123893806, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00015931797447293552, |
|
"loss": 1.5923, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.9587020648967551, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00015906142293583425, |
|
"loss": 1.5408, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.9616519174041298, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00015880427302987951, |
|
"loss": 1.7809, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.9646017699115044, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00015854652736033354, |
|
"loss": 1.5759, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.967551622418879, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00015828818853849444, |
|
"loss": 1.6399, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.9705014749262537, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0001580292591816697, |
|
"loss": 1.7938, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.9734513274336283, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.0001577697419131497, |
|
"loss": 1.8051, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.976401179941003, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00015750963936218105, |
|
"loss": 1.6255, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.9793510324483776, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00015724895416394018, |
|
"loss": 1.7337, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.9823008849557522, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00015698768895950642, |
|
"loss": 1.6922, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.9852507374631269, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00015672584639583528, |
|
"loss": 1.6776, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.9882005899705014, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00015646342912573177, |
|
"loss": 1.8007, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.9911504424778761, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00015620043980782327, |
|
"loss": 1.566, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.9941002949852508, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00015593688110653283, |
|
"loss": 1.4985, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.9970501474926253, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00015567275569205218, |
|
"loss": 1.7079, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00015540806624031442, |
|
"loss": 1.6612, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.0029498525073746, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00015514281543296715, |
|
"loss": 1.6204, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.0058997050147493, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 0.00015487700595734536, |
|
"loss": 1.6187, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.008849557522124, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.0001546106405064438, |
|
"loss": 1.561, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.0117994100294985, |
|
"grad_norm": 0.423828125, |
|
"learning_rate": 0.00015434372177889022, |
|
"loss": 1.6984, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.0147492625368733, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00015407625247891772, |
|
"loss": 1.542, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.0176991150442478, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00015380823531633729, |
|
"loss": 1.6418, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.0206489675516224, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.0001535396730065106, |
|
"loss": 1.4689, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.023598820058997, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.0001532705682703224, |
|
"loss": 1.5845, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.0265486725663717, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 0.00015300092383415282, |
|
"loss": 1.5536, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.0294985250737463, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.00015273074242984987, |
|
"loss": 1.825, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.0324483775811208, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00015246002679470175, |
|
"loss": 1.6439, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.0353982300884956, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.0001521887796714092, |
|
"loss": 1.7412, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.0383480825958702, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.00015191700380805752, |
|
"loss": 1.6896, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.0412979351032448, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.0001516447019580889, |
|
"loss": 1.714, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.0442477876106195, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00015137187688027436, |
|
"loss": 1.6801, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.047197640117994, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.000151098531338686, |
|
"loss": 1.6533, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.0501474926253687, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00015082466810266884, |
|
"loss": 1.8058, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.0530973451327434, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00015055028994681284, |
|
"loss": 1.7444, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.056047197640118, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00015027539965092477, |
|
"loss": 1.6367, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.0589970501474926, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00015000000000000001, |
|
"loss": 1.6673, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.0619469026548674, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.0001497240937841944, |
|
"loss": 1.5997, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.064896755162242, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00014944768379879591, |
|
"loss": 1.7991, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.0678466076696165, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00014917077284419634, |
|
"loss": 1.6099, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.0707964601769913, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00014889336372586305, |
|
"loss": 1.6051, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.0737463126843658, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 0.00014861545925431036, |
|
"loss": 1.5927, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.0766961651917404, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.00014833706224507114, |
|
"loss": 1.7938, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.079646017699115, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00014805817551866838, |
|
"loss": 1.5313, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.0825958702064897, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00014777880190058654, |
|
"loss": 1.6738, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.0855457227138643, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 0.0001474989442212428, |
|
"loss": 1.7013, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.0884955752212389, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 0.0001472186053159587, |
|
"loss": 1.7573, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.0914454277286136, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00014693778802493104, |
|
"loss": 1.7217, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.0943952802359882, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00014665649519320342, |
|
"loss": 1.562, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.0973451327433628, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.0001463747296706372, |
|
"loss": 1.666, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.1002949852507375, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00014609249431188278, |
|
"loss": 1.6303, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.103244837758112, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 0.0001458097919763506, |
|
"loss": 1.7065, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.1061946902654867, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 0.0001455266255281821, |
|
"loss": 1.5575, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.1091445427728615, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.0001452429978362209, |
|
"loss": 1.6452, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.112094395280236, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00014495891177398353, |
|
"loss": 1.6286, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.1150442477876106, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.0001446743702196304, |
|
"loss": 1.7517, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.1179941002949851, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.0001443893760559367, |
|
"loss": 1.6429, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.12094395280236, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00014410393217026318, |
|
"loss": 1.6122, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.1238938053097345, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 0.0001438180414545267, |
|
"loss": 1.5116, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.1268436578171093, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00014353170680517132, |
|
"loss": 1.7445, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.1297935103244838, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00014324493112313844, |
|
"loss": 1.8679, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.1327433628318584, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00014295771731383797, |
|
"loss": 1.8117, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.135693215339233, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.0001426700682871184, |
|
"loss": 1.7062, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.1386430678466077, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001423819869572377, |
|
"loss": 1.6174, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.1415929203539823, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001420934762428335, |
|
"loss": 1.5873, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.1445427728613569, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00014180453906689378, |
|
"loss": 1.6217, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.1474926253687316, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00014151517835672697, |
|
"loss": 1.5944, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.1504424778761062, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00014122539704393265, |
|
"loss": 1.7194, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.1533923303834808, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 0.00014093519806437148, |
|
"loss": 1.6986, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.1563421828908556, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00014064458435813566, |
|
"loss": 1.6477, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1592920353982301, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00014035355886951923, |
|
"loss": 1.6155, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1622418879056047, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00014006212454698797, |
|
"loss": 1.6334, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1651917404129795, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 0.00013977028434314975, |
|
"loss": 1.5663, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.168141592920354, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 0.0001394780412147245, |
|
"loss": 1.6925, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.1710914454277286, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001391853981225144, |
|
"loss": 1.9437, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.1740412979351031, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00013889235803137364, |
|
"loss": 1.5375, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.176991150442478, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.00013859892391017865, |
|
"loss": 1.5203, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.1799410029498525, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00013830509873179785, |
|
"loss": 1.7761, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.1799410029498525, |
|
"eval_loss": 1.6681677103042603, |
|
"eval_runtime": 31.9388, |
|
"eval_samples_per_second": 31.31, |
|
"eval_steps_per_second": 3.914, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.182890855457227, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 0.00013801088547306148, |
|
"loss": 1.6017, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.1858407079646018, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00013771628711473172, |
|
"loss": 1.4662, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.1887905604719764, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00013742130664147218, |
|
"loss": 1.5061, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.191740412979351, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 0.00013712594704181784, |
|
"loss": 1.5444, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.1946902654867257, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001368302113081447, |
|
"loss": 1.5087, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.1976401179941003, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 0.00013653410243663952, |
|
"loss": 1.8815, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.2005899705014749, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.00013623762342726935, |
|
"loss": 1.8163, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.2035398230088497, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00013594077728375128, |
|
"loss": 1.6584, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.2064896755162242, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 0.00013564356701352198, |
|
"loss": 1.7079, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.2094395280235988, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00013534599562770714, |
|
"loss": 1.5648, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.2123893805309733, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00013504806614109098, |
|
"loss": 1.839, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.2153392330383481, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00013474978157208592, |
|
"loss": 1.6327, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.2182890855457227, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00013445114494270154, |
|
"loss": 1.692, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.2212389380530975, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.0001341521592785145, |
|
"loss": 1.6227, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.224188790560472, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00013385282760863758, |
|
"loss": 1.7722, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.2271386430678466, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00013355315296568893, |
|
"loss": 1.5891, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.2300884955752212, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.0001332531383857616, |
|
"loss": 1.6964, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.233038348082596, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.0001329527869083926, |
|
"loss": 1.5819, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.2359882005899705, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00013265210157653213, |
|
"loss": 1.7053, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.238938053097345, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00013235108543651272, |
|
"loss": 1.8764, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.2418879056047198, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.0001320497415380185, |
|
"loss": 1.6492, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.2448377581120944, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 0.00013174807293405428, |
|
"loss": 1.6954, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.247787610619469, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00013144608268091435, |
|
"loss": 1.6719, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.2507374631268435, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 0.00013114377383815188, |
|
"loss": 1.6428, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.2536873156342183, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 0.00013084114946854776, |
|
"loss": 1.7714, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.2566371681415929, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00013053821263807946, |
|
"loss": 1.6289, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.2595870206489677, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00013023496641589025, |
|
"loss": 1.6812, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2625368731563422, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00012993141387425788, |
|
"loss": 1.7832, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.2654867256637168, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 0.00012962755808856342, |
|
"loss": 1.5353, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.2684365781710913, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.0001293234021372603, |
|
"loss": 1.7298, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.2713864306784661, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 0.00012901894910184297, |
|
"loss": 1.6888, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.2743362831858407, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00012871420206681571, |
|
"loss": 1.5897, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.2772861356932155, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00012840916411966153, |
|
"loss": 1.7212, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.28023598820059, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00012810383835081058, |
|
"loss": 1.5934, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.2831858407079646, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.00012779822785360912, |
|
"loss": 1.5638, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.2861356932153392, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00012749233572428804, |
|
"loss": 1.6373, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.289085545722714, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00012718616506193151, |
|
"loss": 1.7397, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.2920353982300885, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 0.00012687971896844575, |
|
"loss": 1.7015, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.294985250737463, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 0.00012657300054852718, |
|
"loss": 1.5377, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.2979351032448379, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.0001262660129096315, |
|
"loss": 1.6684, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.3008849557522124, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00012595875916194188, |
|
"loss": 1.7431, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.303834808259587, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 0.0001256512424183373, |
|
"loss": 1.6242, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.3067846607669615, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.0001253434657943616, |
|
"loss": 1.5406, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.3097345132743363, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00012503543240819127, |
|
"loss": 1.7173, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.3126843657817109, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00012472714538060422, |
|
"loss": 1.7823, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.3156342182890857, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.0001244186078349481, |
|
"loss": 1.7012, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.3185840707964602, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.00012410982289710865, |
|
"loss": 1.6575, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.3215339233038348, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.0001238007936954779, |
|
"loss": 1.7275, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.3244837758112094, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.0001234915233609227, |
|
"loss": 1.6566, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.3274336283185841, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.00012318201502675285, |
|
"loss": 1.5599, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.3303834808259587, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 0.00012287227182868938, |
|
"loss": 1.7428, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.3333333333333333, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 0.00012256229690483283, |
|
"loss": 1.5681, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.336283185840708, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 0.00012225209339563145, |
|
"loss": 1.6235, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.3392330383480826, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00012194166444384927, |
|
"loss": 1.6855, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.3421828908554572, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00012163101319453436, |
|
"loss": 1.7948, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.3451327433628317, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 0.00012132014279498703, |
|
"loss": 1.5448, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.3480825958702065, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 0.00012100905639472779, |
|
"loss": 1.7487, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.351032448377581, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00012069775714546559, |
|
"loss": 1.6448, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.3539823008849559, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00012038624820106572, |
|
"loss": 1.6214, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.3569321533923304, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00012007453271751805, |
|
"loss": 1.6763, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.359882005899705, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00011976261385290486, |
|
"loss": 1.6629, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.3628318584070795, |
|
"grad_norm": 0.40234375, |
|
"learning_rate": 0.00011945049476736905, |
|
"loss": 1.6993, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.3657817109144543, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00011913817862308194, |
|
"loss": 1.631, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.368731563421829, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00011882566858421135, |
|
"loss": 1.6783, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.3716814159292037, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00011851296781688952, |
|
"loss": 1.5765, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.3746312684365782, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00011820007948918093, |
|
"loss": 1.612, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.3775811209439528, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 0.00011788700677105037, |
|
"loss": 1.6183, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.3805309734513274, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00011757375283433076, |
|
"loss": 1.7006, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.3834808259587021, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00011726032085269092, |
|
"loss": 1.6204, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.3864306784660767, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00011694671400160356, |
|
"loss": 1.6769, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.3893805309734513, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00011663293545831302, |
|
"loss": 1.7014, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.392330383480826, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 0.00011631898840180309, |
|
"loss": 1.602, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.3952802359882006, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.00011600487601276486, |
|
"loss": 1.7864, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.3982300884955752, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00011569060147356441, |
|
"loss": 1.783, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.4011799410029497, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00011537616796821064, |
|
"loss": 1.6512, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.4041297935103245, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.000115061578682323, |
|
"loss": 1.6049, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.407079646017699, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00011474683680309912, |
|
"loss": 1.9239, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.4100294985250739, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00011443194551928266, |
|
"loss": 1.4879, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.4129793510324484, |
|
"grad_norm": 0.390625, |
|
"learning_rate": 0.000114116908021131, |
|
"loss": 1.6713, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.415929203539823, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 0.00011380172750038269, |
|
"loss": 1.5695, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.4188790560471976, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00011348640715022544, |
|
"loss": 1.861, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.4218289085545723, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00011317095016526362, |
|
"loss": 1.6248, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.424778761061947, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00011285535974148576, |
|
"loss": 1.6543, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.4277286135693215, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.00011253963907623235, |
|
"loss": 1.4798, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.4306784660766962, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00011222379136816345, |
|
"loss": 1.6346, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.4336283185840708, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 0.00011190781981722623, |
|
"loss": 1.6701, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.4365781710914454, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00011159172762462242, |
|
"loss": 1.6681, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.43952802359882, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00011127551799277616, |
|
"loss": 1.8147, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.4424778761061947, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.00011095919412530136, |
|
"loss": 1.8219, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.4454277286135693, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.0001106427592269692, |
|
"loss": 1.7514, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.448377581120944, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00011032621650367585, |
|
"loss": 1.6597, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.4513274336283186, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 0.00011000956916240985, |
|
"loss": 1.6328, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.4542772861356932, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 0.00010969282041121962, |
|
"loss": 1.6175, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.4572271386430677, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00010937597345918109, |
|
"loss": 1.5568, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.4601769911504425, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00010905903151636501, |
|
"loss": 1.6987, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.463126843657817, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 0.00010874199779380446, |
|
"loss": 1.5353, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.4660766961651919, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 0.0001084248755034625, |
|
"loss": 1.6101, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.4690265486725664, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 0.00010810766785819946, |
|
"loss": 1.7283, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.471976401179941, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.00010779037807174033, |
|
"loss": 1.7475, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.4749262536873156, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00010747300935864243, |
|
"loss": 1.6163, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.4778761061946903, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 0.00010715556493426262, |
|
"loss": 1.7581, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.480825958702065, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00010683804801472486, |
|
"loss": 1.8644, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.4837758112094395, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 0.00010652046181688751, |
|
"loss": 1.8357, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.4867256637168142, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 0.00010620280955831087, |
|
"loss": 1.5575, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.4896755162241888, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 0.00010588509445722459, |
|
"loss": 1.7064, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.4926253687315634, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00010556731973249485, |
|
"loss": 1.6476, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.495575221238938, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 0.00010524948860359193, |
|
"loss": 1.625, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.4985250737463127, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00010493160429055766, |
|
"loss": 1.7089, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.5014749262536873, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00010461367001397258, |
|
"loss": 1.6814, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.504424778761062, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.00010429568899492348, |
|
"loss": 1.69, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.5073746312684366, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 0.00010397766445497072, |
|
"loss": 1.7063, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.5103244837758112, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 0.00010365959961611553, |
|
"loss": 1.6066, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.5132743362831858, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 0.00010334149770076747, |
|
"loss": 1.6865, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.5162241887905603, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 0.00010302336193171174, |
|
"loss": 1.7555, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.519174041297935, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 0.00010270519553207642, |
|
"loss": 1.5106, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.5221238938053099, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 0.00010238700172530009, |
|
"loss": 1.5335, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.5250737463126844, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00010206878373509887, |
|
"loss": 1.5687, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.528023598820059, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 0.00010175054478543393, |
|
"loss": 1.6076, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.5309734513274336, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 0.00010143228810047875, |
|
"loss": 1.7202, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.5339233038348081, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 0.00010111401690458654, |
|
"loss": 1.6842, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.536873156342183, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 0.00010079573442225759, |
|
"loss": 1.6798, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.5398230088495575, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.00010047744387810632, |
|
"loss": 1.6449, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.5427728613569323, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 0.00010015914849682904, |
|
"loss": 1.5466, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.5457227138643068, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 9.9840851503171e-05, |
|
"loss": 1.5876, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.5486725663716814, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.952255612189368e-05, |
|
"loss": 1.9133, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.551622418879056, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 9.920426557774245e-05, |
|
"loss": 1.7234, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.5545722713864307, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.888598309541347e-05, |
|
"loss": 1.6855, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.5575221238938053, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 9.856771189952126e-05, |
|
"loss": 1.844, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.56047197640118, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 9.824945521456612e-05, |
|
"loss": 1.6246, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.5634218289085546, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.793121626490115e-05, |
|
"loss": 1.6824, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.5663716814159292, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 9.761299827469992e-05, |
|
"loss": 1.7217, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.5693215339233038, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 9.729480446792357e-05, |
|
"loss": 1.6912, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.5722713864306783, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.69766380682883e-05, |
|
"loss": 1.7145, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.575221238938053, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 9.665850229923258e-05, |
|
"loss": 1.7526, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.5781710914454279, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.634040038388448e-05, |
|
"loss": 1.6355, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.5811209439528024, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 9.602233554502931e-05, |
|
"loss": 1.5954, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.584070796460177, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.570431100507651e-05, |
|
"loss": 1.6228, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.5870206489675516, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 9.538632998602745e-05, |
|
"loss": 1.6955, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.5899705014749261, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 9.506839570944238e-05, |
|
"loss": 1.7207, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.592920353982301, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 9.475051139640809e-05, |
|
"loss": 1.5637, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.5958702064896755, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 9.44326802675052e-05, |
|
"loss": 1.5857, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.5988200589970503, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.411490554277541e-05, |
|
"loss": 1.5639, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.6017699115044248, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 9.379719044168915e-05, |
|
"loss": 1.5414, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.6047197640117994, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 9.34795381831125e-05, |
|
"loss": 1.4982, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.607669616519174, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.316195198527518e-05, |
|
"loss": 1.7949, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.6106194690265485, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 9.28444350657374e-05, |
|
"loss": 1.7075, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.6135693215339233, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 9.252699064135758e-05, |
|
"loss": 1.69, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.616519174041298, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 9.220962192825968e-05, |
|
"loss": 1.6767, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.6194690265486726, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 9.189233214180056e-05, |
|
"loss": 1.6374, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.6224188790560472, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 9.157512449653751e-05, |
|
"loss": 1.6882, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.6253687315634218, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.125800220619558e-05, |
|
"loss": 1.7315, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.6283185840707963, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 9.094096848363502e-05, |
|
"loss": 1.7225, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.631268436578171, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.062402654081895e-05, |
|
"loss": 1.714, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.6342182890855457, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 9.030717958878037e-05, |
|
"loss": 1.8014, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.6371681415929205, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.999043083759017e-05, |
|
"loss": 1.5867, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.640117994100295, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 8.967378349632415e-05, |
|
"loss": 1.5943, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.6430678466076696, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 8.935724077303083e-05, |
|
"loss": 1.6402, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.6460176991150441, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 8.904080587469868e-05, |
|
"loss": 1.6993, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.648967551622419, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 8.872448200722385e-05, |
|
"loss": 1.8536, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.6519174041297935, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 8.840827237537761e-05, |
|
"loss": 1.6816, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.6548672566371683, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 8.809218018277378e-05, |
|
"loss": 1.5556, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.6578171091445428, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 8.777620863183657e-05, |
|
"loss": 1.6722, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.6607669616519174, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 8.74603609237677e-05, |
|
"loss": 1.7428, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.663716814159292, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 8.714464025851427e-05, |
|
"loss": 1.6679, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 8.682904983473641e-05, |
|
"loss": 1.7588, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.6696165191740413, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 8.651359284977454e-05, |
|
"loss": 1.7624, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.672566371681416, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 8.619827249961733e-05, |
|
"loss": 1.6439, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.6755162241887906, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 8.588309197886905e-05, |
|
"loss": 1.7156, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.6784660766961652, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 8.556805448071735e-05, |
|
"loss": 1.5685, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.6814159292035398, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 8.525316319690092e-05, |
|
"loss": 1.6659, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.6843657817109143, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 8.493842131767701e-05, |
|
"loss": 1.5682, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.6873156342182891, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 8.462383203178938e-05, |
|
"loss": 1.61, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.6902654867256637, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 8.430939852643558e-05, |
|
"loss": 1.6964, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.6932153392330385, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 8.399512398723515e-05, |
|
"loss": 1.6285, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.696165191740413, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 8.368101159819693e-05, |
|
"loss": 1.6619, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.6991150442477876, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 8.336706454168701e-05, |
|
"loss": 1.6103, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.7020648967551621, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 8.305328599839647e-05, |
|
"loss": 1.6996, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.7050147492625367, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 8.273967914730909e-05, |
|
"loss": 1.712, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.7079646017699115, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 8.242624716566927e-05, |
|
"loss": 1.6079, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.7109144542772863, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 8.211299322894965e-05, |
|
"loss": 1.4922, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.7138643067846608, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 8.17999205108191e-05, |
|
"loss": 1.7535, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.7168141592920354, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 8.148703218311053e-05, |
|
"loss": 1.5719, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.71976401179941, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 8.117433141578866e-05, |
|
"loss": 1.7844, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.7227138643067845, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 8.086182137691808e-05, |
|
"loss": 1.6151, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.7256637168141593, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 8.054950523263096e-05, |
|
"loss": 1.6327, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.7286135693215339, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 8.023738614709516e-05, |
|
"loss": 1.5235, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.7315634218289087, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 7.9925467282482e-05, |
|
"loss": 1.6449, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.7345132743362832, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 7.96137517989343e-05, |
|
"loss": 1.7244, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.7374631268436578, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.930224285453445e-05, |
|
"loss": 1.7338, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.7404129793510323, |
|
"grad_norm": 0.26171875, |
|
"learning_rate": 7.89909436052722e-05, |
|
"loss": 1.7002, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.7433628318584071, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 7.867985720501301e-05, |
|
"loss": 1.7839, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.7463126843657817, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 7.836898680546569e-05, |
|
"loss": 1.7261, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.7492625368731565, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 7.805833555615077e-05, |
|
"loss": 1.588, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.752212389380531, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.774790660436858e-05, |
|
"loss": 1.7156, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.7551622418879056, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 7.743770309516715e-05, |
|
"loss": 1.4764, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.7581120943952802, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 7.712772817131064e-05, |
|
"loss": 1.4958, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.7610619469026547, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 7.681798497324716e-05, |
|
"loss": 1.6959, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.7640117994100295, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 7.650847663907733e-05, |
|
"loss": 1.5634, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.7669616519174043, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.619920630452214e-05, |
|
"loss": 1.6262, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.7699115044247788, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.589017710289139e-05, |
|
"loss": 1.708, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.7699115044247788, |
|
"eval_loss": 1.6580859422683716, |
|
"eval_runtime": 31.6648, |
|
"eval_samples_per_second": 31.581, |
|
"eval_steps_per_second": 3.948, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.7728613569321534, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 7.558139216505192e-05, |
|
"loss": 1.7115, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.775811209439528, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 7.527285461939577e-05, |
|
"loss": 1.5915, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.7787610619469025, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 7.496456759180875e-05, |
|
"loss": 1.6569, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.7817109144542773, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 7.465653420563845e-05, |
|
"loss": 1.6496, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.7846607669616519, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 7.434875758166271e-05, |
|
"loss": 1.6294, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.7876106194690267, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 7.404124083805819e-05, |
|
"loss": 1.6477, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.7905604719764012, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 7.373398709036849e-05, |
|
"loss": 1.668, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.7935103244837758, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 7.342699945147282e-05, |
|
"loss": 1.7326, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.7964601769911503, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 7.312028103155426e-05, |
|
"loss": 1.7393, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.799410029498525, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 7.281383493806848e-05, |
|
"loss": 1.7425, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.8023598820058997, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 7.2507664275712e-05, |
|
"loss": 1.5446, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.8053097345132745, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 7.220177214639088e-05, |
|
"loss": 1.7148, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.808259587020649, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 7.189616164918943e-05, |
|
"loss": 1.8348, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.8112094395280236, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 7.159083588033848e-05, |
|
"loss": 1.6544, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.8141592920353982, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 7.128579793318428e-05, |
|
"loss": 1.7254, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.8171091445427727, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 7.098105089815707e-05, |
|
"loss": 1.891, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.8200589970501475, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 7.067659786273974e-05, |
|
"loss": 1.8228, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.823008849557522, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 7.037244191143661e-05, |
|
"loss": 1.6688, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.8259587020648969, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 7.006858612574215e-05, |
|
"loss": 1.7359, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.8289085545722714, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 6.976503358410976e-05, |
|
"loss": 1.6354, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.831858407079646, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 6.946178736192053e-05, |
|
"loss": 1.6249, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.8348082595870205, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 6.915885053145228e-05, |
|
"loss": 1.5185, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.8377581120943953, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 6.885622616184817e-05, |
|
"loss": 1.7161, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.8407079646017699, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 6.855391731908567e-05, |
|
"loss": 1.8381, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.8436578171091447, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 6.825192706594575e-05, |
|
"loss": 1.4908, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.8466076696165192, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 6.795025846198148e-05, |
|
"loss": 1.6379, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.8495575221238938, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 6.764891456348729e-05, |
|
"loss": 1.6859, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.8525073746312684, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 6.734789842346791e-05, |
|
"loss": 1.5771, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.855457227138643, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 6.704721309160743e-05, |
|
"loss": 1.79, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.8584070796460177, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 6.674686161423843e-05, |
|
"loss": 1.495, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.8613569321533925, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 6.644684703431108e-05, |
|
"loss": 1.7951, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.864306784660767, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 6.614717239136246e-05, |
|
"loss": 1.7541, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.8672566371681416, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 6.584784072148555e-05, |
|
"loss": 1.9289, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.8702064896755162, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 6.554885505729849e-05, |
|
"loss": 1.6045, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.8731563421828907, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 6.525021842791414e-05, |
|
"loss": 1.6308, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.8761061946902655, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 6.495193385890901e-05, |
|
"loss": 1.4904, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.87905604719764, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 6.46540043722929e-05, |
|
"loss": 1.6158, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.8820058997050149, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 6.435643298647802e-05, |
|
"loss": 1.749, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.8849557522123894, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 6.405922271624874e-05, |
|
"loss": 1.7184, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.887905604719764, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 6.37623765727307e-05, |
|
"loss": 1.6861, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.8908554572271385, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 6.34658975633605e-05, |
|
"loss": 1.7182, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.893805309734513, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 6.316978869185532e-05, |
|
"loss": 1.5248, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.896755162241888, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 6.287405295818215e-05, |
|
"loss": 1.649, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.8997050147492627, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 6.257869335852782e-05, |
|
"loss": 1.9492, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.9026548672566372, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 6.22837128852683e-05, |
|
"loss": 1.5825, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.9056047197640118, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 6.198911452693853e-05, |
|
"loss": 1.7139, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.9085545722713864, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 6.169490126820221e-05, |
|
"loss": 1.4473, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.911504424778761, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 6.140107608982136e-05, |
|
"loss": 1.707, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.9144542772861357, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 6.110764196862638e-05, |
|
"loss": 1.6014, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.9174041297935103, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 6.08146018774856e-05, |
|
"loss": 1.7926, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.920353982300885, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 6.05219587852755e-05, |
|
"loss": 1.6587, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.9233038348082596, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 6.0229715656850305e-05, |
|
"loss": 1.7686, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.9262536873156342, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 5.993787545301204e-05, |
|
"loss": 1.8097, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.9292035398230087, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 5.964644113048079e-05, |
|
"loss": 1.5351, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.9321533923303835, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 5.9355415641864334e-05, |
|
"loss": 1.6233, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.935103244837758, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 5.9064801935628555e-05, |
|
"loss": 1.6599, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.9380530973451329, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 5.877460295606738e-05, |
|
"loss": 1.6859, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.9410029498525074, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 5.8484821643273036e-05, |
|
"loss": 1.6999, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.943952802359882, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 5.819546093310627e-05, |
|
"loss": 1.6546, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.9469026548672566, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 5.790652375716652e-05, |
|
"loss": 1.5823, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.9498525073746311, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 5.761801304276232e-05, |
|
"loss": 1.6418, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.952802359882006, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 5.732993171288159e-05, |
|
"loss": 2.0341, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.9557522123893807, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 5.704228268616208e-05, |
|
"loss": 1.868, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.9587020648967552, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.675506887686157e-05, |
|
"loss": 1.5891, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.9616519174041298, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 5.6468293194828715e-05, |
|
"loss": 1.6423, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.9646017699115044, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 5.6181958545473325e-05, |
|
"loss": 1.6751, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.967551622418879, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 5.589606782973683e-05, |
|
"loss": 1.5909, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.9705014749262537, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 5.5610623944063325e-05, |
|
"loss": 1.6203, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.9734513274336283, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 5.5325629780369635e-05, |
|
"loss": 1.6008, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.976401179941003, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 5.50410882260165e-05, |
|
"loss": 1.5562, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.9793510324483776, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 5.4757002163779136e-05, |
|
"loss": 1.6538, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.9823008849557522, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 5.4473374471817906e-05, |
|
"loss": 1.6784, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.9852507374631267, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 5.41902080236494e-05, |
|
"loss": 1.835, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.9882005899705013, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 5.39075056881172e-05, |
|
"loss": 1.7589, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.991150442477876, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 5.362527032936277e-05, |
|
"loss": 1.5125, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.9941002949852509, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 5.334350480679662e-05, |
|
"loss": 1.4691, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.9970501474926254, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 5.306221197506899e-05, |
|
"loss": 1.5478, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 0.41015625, |
|
"learning_rate": 5.278139468404133e-05, |
|
"loss": 1.705, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 2.0029498525073746, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 5.2501055778757194e-05, |
|
"loss": 1.6805, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 2.005899705014749, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 5.22211980994135e-05, |
|
"loss": 1.59, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 2.0088495575221237, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 5.1941824481331626e-05, |
|
"loss": 1.7236, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 2.0117994100294987, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 5.166293775492887e-05, |
|
"loss": 1.5412, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 2.0147492625368733, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 5.13845407456897e-05, |
|
"loss": 1.6976, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 2.017699115044248, |
|
"grad_norm": 0.3828125, |
|
"learning_rate": 5.110663627413694e-05, |
|
"loss": 1.7459, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 2.0206489675516224, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 5.082922715580367e-05, |
|
"loss": 1.7716, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 2.023598820058997, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 5.055231620120413e-05, |
|
"loss": 1.6577, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 2.0265486725663715, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 5.0275906215805625e-05, |
|
"loss": 1.7055, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 2.0294985250737465, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 5.000000000000002e-05, |
|
"loss": 1.7497, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.032448377581121, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 4.972460034907524e-05, |
|
"loss": 1.5918, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 2.0353982300884956, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 4.944971005318716e-05, |
|
"loss": 1.563, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.03834808259587, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 4.9175331897331154e-05, |
|
"loss": 1.4705, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 2.0412979351032448, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 4.890146866131403e-05, |
|
"loss": 1.6201, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.0442477876106193, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 4.862812311972567e-05, |
|
"loss": 1.5944, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 2.047197640117994, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 4.8355298041911125e-05, |
|
"loss": 1.6683, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.050147492625369, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 4.808299619194251e-05, |
|
"loss": 1.5897, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.0530973451327434, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 4.781122032859079e-05, |
|
"loss": 1.5515, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.056047197640118, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 4.753997320529827e-05, |
|
"loss": 1.6306, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 2.0589970501474926, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 4.726925757015017e-05, |
|
"loss": 1.6803, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.061946902654867, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 4.699907616584721e-05, |
|
"loss": 1.8112, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 2.0648967551622417, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 4.672943172967764e-05, |
|
"loss": 1.6841, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.0678466076696167, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 4.6460326993489414e-05, |
|
"loss": 1.6514, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 2.0707964601769913, |
|
"grad_norm": 0.384765625, |
|
"learning_rate": 4.6191764683662744e-05, |
|
"loss": 1.6776, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.073746312684366, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 4.592374752108231e-05, |
|
"loss": 1.8347, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 2.0766961651917404, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 4.5656278221109804e-05, |
|
"loss": 1.6331, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.079646017699115, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 4.538935949355623e-05, |
|
"loss": 1.7079, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.0825958702064895, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 4.512299404265469e-05, |
|
"loss": 1.5765, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.0855457227138645, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 4.485718456703284e-05, |
|
"loss": 1.8369, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 2.088495575221239, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 4.45919337596856e-05, |
|
"loss": 1.6378, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.0914454277286136, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 4.432724430794786e-05, |
|
"loss": 1.6439, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.094395280235988, |
|
"grad_norm": 0.40625, |
|
"learning_rate": 4.406311889346717e-05, |
|
"loss": 1.7181, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.0973451327433628, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 4.379956019217675e-05, |
|
"loss": 1.6335, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.1002949852507373, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 4.3536570874268266e-05, |
|
"loss": 1.6247, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.103244837758112, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 4.327415360416468e-05, |
|
"loss": 1.5901, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.106194690265487, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 4.3012311040493594e-05, |
|
"loss": 1.7575, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.1091445427728615, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 4.275104583605982e-05, |
|
"loss": 1.6593, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.112094395280236, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 4.249036063781896e-05, |
|
"loss": 1.5962, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.1150442477876106, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 4.2230258086850374e-05, |
|
"loss": 1.7337, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.117994100294985, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 4.197074081833033e-05, |
|
"loss": 1.6672, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.1209439528023597, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 4.171181146150557e-05, |
|
"loss": 1.692, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.1238938053097347, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 4.1453472639666457e-05, |
|
"loss": 1.5694, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.1268436578171093, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 4.1195726970120516e-05, |
|
"loss": 1.6111, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.129793510324484, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 4.093857706416577e-05, |
|
"loss": 1.5507, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.1327433628318584, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 4.0682025527064486e-05, |
|
"loss": 1.5692, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.135693215339233, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 4.042607495801667e-05, |
|
"loss": 1.6376, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.1386430678466075, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 4.017072795013359e-05, |
|
"loss": 1.6968, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.1415929203539825, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 3.991598709041196e-05, |
|
"loss": 1.6051, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.144542772861357, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 3.96618549597071e-05, |
|
"loss": 1.5706, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.1474926253687316, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 3.9408334132707315e-05, |
|
"loss": 1.6237, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.150442477876106, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 3.915542717790759e-05, |
|
"loss": 1.7036, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.1533923303834808, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 3.890313665758348e-05, |
|
"loss": 1.7589, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.1563421828908553, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 3.865146512776537e-05, |
|
"loss": 1.7329, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.15929203539823, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 3.840041513821243e-05, |
|
"loss": 1.7174, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.162241887905605, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 3.814998923238685e-05, |
|
"loss": 1.6467, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.1651917404129795, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 3.7900189947427944e-05, |
|
"loss": 1.7202, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.168141592920354, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 3.7651019814126654e-05, |
|
"loss": 1.5566, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.1710914454277286, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 3.740248135689975e-05, |
|
"loss": 1.6242, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.174041297935103, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 3.7154577093764334e-05, |
|
"loss": 1.6517, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.1769911504424777, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 3.6907309536312276e-05, |
|
"loss": 1.8194, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.1799410029498527, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 3.666068118968474e-05, |
|
"loss": 1.7721, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.1828908554572273, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 3.6414694552546946e-05, |
|
"loss": 1.4912, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.185840707964602, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 3.616935211706275e-05, |
|
"loss": 1.6488, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.1887905604719764, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 3.592465636886933e-05, |
|
"loss": 1.5433, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.191740412979351, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 3.568060978705214e-05, |
|
"loss": 1.407, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.1946902654867255, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 3.543721484411976e-05, |
|
"loss": 1.7491, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.1976401179941005, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 3.51944740059788e-05, |
|
"loss": 1.7076, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.200589970501475, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 3.495238973190894e-05, |
|
"loss": 1.6633, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.2035398230088497, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 3.4710964474537966e-05, |
|
"loss": 1.6341, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.206489675516224, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 3.447020067981704e-05, |
|
"loss": 1.4725, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.2094395280235988, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 3.4230100786995824e-05, |
|
"loss": 1.5887, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.2123893805309733, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 3.399066722859782e-05, |
|
"loss": 1.7881, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.215339233038348, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 3.375190243039556e-05, |
|
"loss": 1.6271, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.218289085545723, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 3.351380881138642e-05, |
|
"loss": 1.7659, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.2212389380530975, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 3.327638878376764e-05, |
|
"loss": 1.5702, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.224188790560472, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 3.3039644752912125e-05, |
|
"loss": 1.5883, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.2271386430678466, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 3.280357911734423e-05, |
|
"loss": 1.7298, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.230088495575221, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 3.256819426871507e-05, |
|
"loss": 1.6289, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.2330383480825957, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 3.233349259177865e-05, |
|
"loss": 1.5501, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.2359882005899703, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 3.209947646436752e-05, |
|
"loss": 1.7646, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.2389380530973453, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 3.1866148257368665e-05, |
|
"loss": 1.5206, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.24188790560472, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 3.163351033469961e-05, |
|
"loss": 1.6256, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.2448377581120944, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 3.140156505328441e-05, |
|
"loss": 1.4853, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.247787610619469, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 3.117031476302975e-05, |
|
"loss": 1.6571, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.2507374631268435, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 3.0939761806801096e-05, |
|
"loss": 1.7517, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.2536873156342185, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 3.0709908520399076e-05, |
|
"loss": 1.5079, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.256637168141593, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 3.0480757232535772e-05, |
|
"loss": 1.7051, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.2595870206489677, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 3.0252310264811067e-05, |
|
"loss": 1.7725, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.262536873156342, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 3.0024569931689207e-05, |
|
"loss": 1.8137, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.265486725663717, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 2.979753854047522e-05, |
|
"loss": 1.6787, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.2684365781710913, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 2.9571218391291744e-05, |
|
"loss": 1.6662, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.271386430678466, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 2.9345611777055594e-05, |
|
"loss": 1.6513, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.274336283185841, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 2.9120720983454463e-05, |
|
"loss": 1.7066, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.2772861356932155, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 2.889654828892393e-05, |
|
"loss": 1.4659, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.28023598820059, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 2.8673095964624296e-05, |
|
"loss": 1.7731, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.2831858407079646, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 2.845036627441755e-05, |
|
"loss": 1.8289, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.286135693215339, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 2.822836147484452e-05, |
|
"loss": 1.5135, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.2890855457227137, |
|
"grad_norm": 0.267578125, |
|
"learning_rate": 2.800708381510182e-05, |
|
"loss": 1.6152, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.2920353982300883, |
|
"grad_norm": 0.365234375, |
|
"learning_rate": 2.778653553701932e-05, |
|
"loss": 1.5877, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.2949852507374633, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 2.7566718875037267e-05, |
|
"loss": 1.6472, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.297935103244838, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 2.73476360561837e-05, |
|
"loss": 1.5617, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.3008849557522124, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 2.7129289300051787e-05, |
|
"loss": 1.5242, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.303834808259587, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 2.6911680818777606e-05, |
|
"loss": 1.6186, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.3067846607669615, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 2.669481281701739e-05, |
|
"loss": 1.6891, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.309734513274336, |
|
"grad_norm": 0.3515625, |
|
"learning_rate": 2.6478687491925357e-05, |
|
"loss": 1.5884, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.312684365781711, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 2.62633070331316e-05, |
|
"loss": 1.6519, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.3156342182890857, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 2.6048673622719568e-05, |
|
"loss": 1.595, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.3185840707964602, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 2.5834789435204243e-05, |
|
"loss": 1.6358, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.321533923303835, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 2.562165663751003e-05, |
|
"loss": 1.6086, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.3244837758112094, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 2.540927738894866e-05, |
|
"loss": 1.6134, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.327433628318584, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 2.5197653841197543e-05, |
|
"loss": 1.636, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.330383480825959, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 2.4986788138277827e-05, |
|
"loss": 1.7181, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.3333333333333335, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 2.4776682416532724e-05, |
|
"loss": 1.6009, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.336283185840708, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 2.4567338804605756e-05, |
|
"loss": 1.7156, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.3392330383480826, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 2.4358759423419474e-05, |
|
"loss": 1.5736, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.342182890855457, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 2.4150946386153605e-05, |
|
"loss": 1.745, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.3451327433628317, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 2.394390179822382e-05, |
|
"loss": 1.628, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.3480825958702063, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 2.3737627757260582e-05, |
|
"loss": 1.792, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.3510324483775813, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 2.3532126353087492e-05, |
|
"loss": 1.5633, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.353982300884956, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 2.3327399667700477e-05, |
|
"loss": 1.6054, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.3569321533923304, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 2.312344977524653e-05, |
|
"loss": 1.5514, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.359882005899705, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 2.2920278742002676e-05, |
|
"loss": 1.6321, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.359882005899705, |
|
"eval_loss": 1.656064510345459, |
|
"eval_runtime": 31.6806, |
|
"eval_samples_per_second": 31.565, |
|
"eval_steps_per_second": 3.946, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.3628318584070795, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 2.2717888626355134e-05, |
|
"loss": 1.7578, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.365781710914454, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 2.251628147877839e-05, |
|
"loss": 1.5415, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.3687315634218287, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 2.2315459341814482e-05, |
|
"loss": 1.5915, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.3716814159292037, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 2.211542425005223e-05, |
|
"loss": 1.695, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.3746312684365782, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 2.191617823010671e-05, |
|
"loss": 1.5922, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.377581120943953, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 2.1717723300598613e-05, |
|
"loss": 1.7543, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.3805309734513274, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 2.1520061472133902e-05, |
|
"loss": 1.8857, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.383480825958702, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 2.1323194747283416e-05, |
|
"loss": 1.6589, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.386430678466077, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 2.1127125120562497e-05, |
|
"loss": 1.665, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.3893805309734515, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 2.0931854578410905e-05, |
|
"loss": 1.7077, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.392330383480826, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 2.0737385099172635e-05, |
|
"loss": 1.8159, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.3952802359882006, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 2.0543718653075782e-05, |
|
"loss": 1.6015, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.398230088495575, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 2.035085720221288e-05, |
|
"loss": 1.608, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.4011799410029497, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 2.0158802700520574e-05, |
|
"loss": 1.7618, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.4041297935103243, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 1.9967557093760226e-05, |
|
"loss": 1.6038, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.4070796460176993, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 1.9777122319497986e-05, |
|
"loss": 2.0348, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.410029498525074, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 1.958750030708527e-05, |
|
"loss": 1.5561, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.4129793510324484, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 1.9398692977639054e-05, |
|
"loss": 1.5294, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.415929203539823, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.9210702244022617e-05, |
|
"loss": 1.6702, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.4188790560471976, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 1.902353001082605e-05, |
|
"loss": 1.7828, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.421828908554572, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 1.883717817434688e-05, |
|
"loss": 1.8072, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.4247787610619467, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 1.8651648622571128e-05, |
|
"loss": 1.5543, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.4277286135693217, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 1.8466943235153844e-05, |
|
"loss": 1.6933, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.4306784660766962, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.8283063883400232e-05, |
|
"loss": 1.5554, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.433628318584071, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 1.8100012430246837e-05, |
|
"loss": 1.7567, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.4365781710914454, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 1.7917790730242322e-05, |
|
"loss": 1.7132, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.43952802359882, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 1.7736400629529003e-05, |
|
"loss": 1.5405, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.442477876106195, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 1.7555843965823992e-05, |
|
"loss": 1.7417, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.4454277286135695, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 1.7376122568400532e-05, |
|
"loss": 1.7138, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.448377581120944, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 1.7197238258069613e-05, |
|
"loss": 1.6209, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.4513274336283186, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 1.7019192847161425e-05, |
|
"loss": 1.7802, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.454277286135693, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 1.6841988139507048e-05, |
|
"loss": 1.8173, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.4572271386430677, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 1.6665625930420024e-05, |
|
"loss": 1.6358, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.4601769911504423, |
|
"grad_norm": 0.349609375, |
|
"learning_rate": 1.6490108006678494e-05, |
|
"loss": 1.7484, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.4631268436578173, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 1.6315436146506703e-05, |
|
"loss": 1.5469, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.466076696165192, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 1.614161211955727e-05, |
|
"loss": 1.6038, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.4690265486725664, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 1.5968637686893186e-05, |
|
"loss": 1.6087, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.471976401179941, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 1.5796514600969837e-05, |
|
"loss": 1.6767, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.4749262536873156, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 1.5625244605617472e-05, |
|
"loss": 1.7009, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.47787610619469, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 1.545482943602341e-05, |
|
"loss": 1.6035, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.4808259587020647, |
|
"grad_norm": 0.359375, |
|
"learning_rate": 1.528527081871438e-05, |
|
"loss": 1.6898, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.4837758112094397, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 1.5116570471539293e-05, |
|
"loss": 1.6041, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.4867256637168142, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 1.4948730103651498e-05, |
|
"loss": 1.586, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.489675516224189, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 1.478175141549174e-05, |
|
"loss": 1.8168, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.4926253687315634, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 1.4615636098770802e-05, |
|
"loss": 1.8171, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.495575221238938, |
|
"grad_norm": 0.34765625, |
|
"learning_rate": 1.4450385836452429e-05, |
|
"loss": 1.4769, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.4985250737463125, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 1.4286002302736168e-05, |
|
"loss": 1.7251, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.501474926253687, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.412248716304052e-05, |
|
"loss": 1.6303, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.504424778761062, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 1.3959842073986085e-05, |
|
"loss": 1.609, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.5073746312684366, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 1.3798068683378574e-05, |
|
"loss": 1.5212, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.510324483775811, |
|
"grad_norm": 0.337890625, |
|
"learning_rate": 1.3637168630192443e-05, |
|
"loss": 1.5393, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.5132743362831858, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.3477143544553995e-05, |
|
"loss": 1.5415, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.5162241887905603, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 1.331799504772493e-05, |
|
"loss": 1.5687, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.5191740412979353, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 1.3159724752086144e-05, |
|
"loss": 1.6611, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.52212389380531, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 1.300233426112103e-05, |
|
"loss": 1.8336, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.5250737463126844, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 1.2845825169399507e-05, |
|
"loss": 1.5726, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.528023598820059, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 1.269019906256178e-05, |
|
"loss": 1.5365, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.5309734513274336, |
|
"grad_norm": 0.333984375, |
|
"learning_rate": 1.2535457517302263e-05, |
|
"loss": 1.7332, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.533923303834808, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 1.2381602101353573e-05, |
|
"loss": 1.6458, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.5368731563421827, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 1.2228634373470726e-05, |
|
"loss": 1.54, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.5398230088495577, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 1.207655588341534e-05, |
|
"loss": 1.5563, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.5427728613569323, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 1.1925368171939777e-05, |
|
"loss": 1.822, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.545722713864307, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 1.1775072770771834e-05, |
|
"loss": 1.9668, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.5486725663716814, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 1.1625671202598875e-05, |
|
"loss": 1.8811, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.551622418879056, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 1.147716498105268e-05, |
|
"loss": 1.5154, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.554572271386431, |
|
"grad_norm": 0.263671875, |
|
"learning_rate": 1.1329555610694008e-05, |
|
"loss": 1.6577, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.557522123893805, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 1.1182844586997266e-05, |
|
"loss": 1.7692, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.56047197640118, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 1.1037033396335528e-05, |
|
"loss": 1.7178, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.5634218289085546, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.0892123515965348e-05, |
|
"loss": 1.593, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.566371681415929, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 1.0748116414011888e-05, |
|
"loss": 1.7809, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.5693215339233038, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.0605013549453913e-05, |
|
"loss": 1.593, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.5722713864306783, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 1.0462816372109153e-05, |
|
"loss": 1.8129, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.5752212389380533, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 1.0321526322619534e-05, |
|
"loss": 1.5413, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.578171091445428, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 1.0181144832436584e-05, |
|
"loss": 1.9159, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.5811209439528024, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 1.0041673323807e-05, |
|
"loss": 1.6049, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.584070796460177, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 9.903113209758096e-06, |
|
"loss": 1.6354, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.5870206489675516, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 9.765465894083636e-06, |
|
"loss": 1.6943, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.589970501474926, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 9.628732771329573e-06, |
|
"loss": 1.8668, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.5929203539823007, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 9.492915226779808e-06, |
|
"loss": 1.6031, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.5958702064896757, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 9.358014636442392e-06, |
|
"loss": 1.6361, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.5988200589970503, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 9.224032367035274e-06, |
|
"loss": 1.6734, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.601769911504425, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 9.090969775972736e-06, |
|
"loss": 1.6508, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.6047197640117994, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 8.9588282113515e-06, |
|
"loss": 1.5599, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.607669616519174, |
|
"grad_norm": 0.357421875, |
|
"learning_rate": 8.827609011937066e-06, |
|
"loss": 1.708, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.6106194690265485, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 8.697313507150184e-06, |
|
"loss": 1.6227, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.613569321533923, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 8.567943017053425e-06, |
|
"loss": 1.7512, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.616519174041298, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 8.439498852337724e-06, |
|
"loss": 1.6559, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.6194690265486726, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 8.311982314309109e-06, |
|
"loss": 1.7414, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.622418879056047, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 8.185394694875592e-06, |
|
"loss": 1.709, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.6253687315634218, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 8.059737276534041e-06, |
|
"loss": 1.5993, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.6283185840707963, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 7.935011332357112e-06, |
|
"loss": 1.6718, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.6312684365781713, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.811218125980535e-06, |
|
"loss": 1.6536, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.6342182890855455, |
|
"grad_norm": 0.30859375, |
|
"learning_rate": 7.688358911590078e-06, |
|
"loss": 1.718, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.6371681415929205, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 7.566434933909006e-06, |
|
"loss": 1.636, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.640117994100295, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 7.445447428185448e-06, |
|
"loss": 1.7125, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.6430678466076696, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 7.325397620179808e-06, |
|
"loss": 1.6113, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.646017699115044, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 7.206286726152434e-06, |
|
"loss": 1.5436, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.6489675516224187, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 7.088115952851238e-06, |
|
"loss": 1.5289, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.6519174041297937, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 6.970886497499518e-06, |
|
"loss": 1.5827, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.6548672566371683, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 6.854599547783736e-06, |
|
"loss": 1.5693, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.657817109144543, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 6.739256281841599e-06, |
|
"loss": 1.6449, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.6607669616519174, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 6.624857868250079e-06, |
|
"loss": 1.651, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.663716814159292, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 6.5114054660135315e-06, |
|
"loss": 1.8066, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.6666666666666665, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 6.39890022455204e-06, |
|
"loss": 1.6108, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.669616519174041, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 6.287343283689661e-06, |
|
"loss": 1.7595, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.672566371681416, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 6.176735773642961e-06, |
|
"loss": 1.6273, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.6755162241887906, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 6.067078815009575e-06, |
|
"loss": 1.6977, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.678466076696165, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 5.958373518756733e-06, |
|
"loss": 1.7285, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.6814159292035398, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.850620986210198e-06, |
|
"loss": 1.6609, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.6843657817109143, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 5.743822309042912e-06, |
|
"loss": 1.5915, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.6873156342182893, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 5.63797856926408e-06, |
|
"loss": 1.9497, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.6902654867256635, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 5.533090839208133e-06, |
|
"loss": 1.6164, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.6932153392330385, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 5.429160181523852e-06, |
|
"loss": 1.6054, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.696165191740413, |
|
"grad_norm": 0.33203125, |
|
"learning_rate": 5.326187649163672e-06, |
|
"loss": 1.6886, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.6991150442477876, |
|
"grad_norm": 0.421875, |
|
"learning_rate": 5.224174285372974e-06, |
|
"loss": 1.8212, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.702064896755162, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 5.123121123679519e-06, |
|
"loss": 1.5959, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.7050147492625367, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 5.023029187882944e-06, |
|
"loss": 1.5412, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.7079646017699117, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 4.923899492044437e-06, |
|
"loss": 1.6449, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.7109144542772863, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 4.825733040476465e-06, |
|
"loss": 1.6615, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.713864306784661, |
|
"grad_norm": 0.361328125, |
|
"learning_rate": 4.728530827732536e-06, |
|
"loss": 1.6998, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.7168141592920354, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 4.632293838597246e-06, |
|
"loss": 1.6826, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.71976401179941, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 4.537023048076128e-06, |
|
"loss": 1.7873, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.7227138643067845, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 4.442719421385922e-06, |
|
"loss": 1.6116, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.725663716814159, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 4.349383913944771e-06, |
|
"loss": 1.569, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.728613569321534, |
|
"grad_norm": 0.376953125, |
|
"learning_rate": 4.257017471362435e-06, |
|
"loss": 1.5746, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.7315634218289087, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 4.165621029430855e-06, |
|
"loss": 1.757, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.734513274336283, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 4.075195514114593e-06, |
|
"loss": 1.5386, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.737463126843658, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 3.985741841541446e-06, |
|
"loss": 1.7038, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.7404129793510323, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 3.897260917993184e-06, |
|
"loss": 1.7146, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.7433628318584073, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 3.8097536398963963e-06, |
|
"loss": 1.4569, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.7463126843657815, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 3.7232208938133393e-06, |
|
"loss": 1.5649, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.7492625368731565, |
|
"grad_norm": 0.27734375, |
|
"learning_rate": 3.6376635564330463e-06, |
|
"loss": 1.7154, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.752212389380531, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 3.5530824945623542e-06, |
|
"loss": 1.7077, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.7551622418879056, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 3.4694785651171456e-06, |
|
"loss": 1.6512, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.75811209439528, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 3.3868526151137445e-06, |
|
"loss": 1.6804, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.7610619469026547, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 3.3052054816602452e-06, |
|
"loss": 1.7667, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.7640117994100297, |
|
"grad_norm": 0.279296875, |
|
"learning_rate": 3.224537991948029e-06, |
|
"loss": 1.6313, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.7669616519174043, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 3.144850963243462e-06, |
|
"loss": 1.6619, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.769911504424779, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 3.0661452028795336e-06, |
|
"loss": 1.636, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.7728613569321534, |
|
"grad_norm": 0.369140625, |
|
"learning_rate": 2.9884215082477408e-06, |
|
"loss": 1.6353, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.775811209439528, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 2.9116806667899734e-06, |
|
"loss": 1.6351, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.7787610619469025, |
|
"grad_norm": 0.2734375, |
|
"learning_rate": 2.835923455990508e-06, |
|
"loss": 1.7801, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.781710914454277, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 2.7611506433682045e-06, |
|
"loss": 1.729, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.784660766961652, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 2.687362986468689e-06, |
|
"loss": 1.7433, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.7876106194690267, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 2.6145612328566717e-06, |
|
"loss": 1.6041, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.7905604719764012, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 2.5427461201083747e-06, |
|
"loss": 1.8355, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.793510324483776, |
|
"grad_norm": 0.392578125, |
|
"learning_rate": 2.471918375804105e-06, |
|
"loss": 1.7569, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.7964601769911503, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 2.402078717520795e-06, |
|
"loss": 1.7676, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.799410029498525, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 2.333227852824804e-06, |
|
"loss": 1.6935, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.8023598820058995, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 2.2653664792647634e-06, |
|
"loss": 1.5871, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.8053097345132745, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 2.19849528436441e-06, |
|
"loss": 1.6557, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.808259587020649, |
|
"grad_norm": 0.298828125, |
|
"learning_rate": 2.132614945615741e-06, |
|
"loss": 1.6709, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.8112094395280236, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 2.067726130472092e-06, |
|
"loss": 1.7262, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.814159292035398, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 2.003829496341325e-06, |
|
"loss": 1.615, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.8171091445427727, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 1.9409256905792762e-06, |
|
"loss": 1.6451, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.8200589970501477, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 1.8790153504831153e-06, |
|
"loss": 1.5465, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.823008849557522, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 1.8180991032849426e-06, |
|
"loss": 1.5904, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.825958702064897, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 1.7581775661453692e-06, |
|
"loss": 1.5141, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.8289085545722714, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 1.6992513461473237e-06, |
|
"loss": 1.5942, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.831858407079646, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 1.6413210402898893e-06, |
|
"loss": 1.4595, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.8348082595870205, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 1.5843872354822097e-06, |
|
"loss": 1.9405, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.837758112094395, |
|
"grad_norm": 0.419921875, |
|
"learning_rate": 1.5284505085376377e-06, |
|
"loss": 1.7256, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.84070796460177, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 1.473511426167784e-06, |
|
"loss": 1.5533, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.8436578171091447, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 1.4195705449768448e-06, |
|
"loss": 1.4959, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.8466076696165192, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 1.3666284114559612e-06, |
|
"loss": 1.6054, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.849557522123894, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 1.3146855619776134e-06, |
|
"loss": 1.6314, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.8525073746312684, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 1.2637425227902787e-06, |
|
"loss": 1.5235, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.855457227138643, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 1.21379981001305e-06, |
|
"loss": 1.6631, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.8584070796460175, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 1.1648579296304253e-06, |
|
"loss": 1.8005, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.8613569321533925, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 1.1169173774871478e-06, |
|
"loss": 1.6668, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.864306784660767, |
|
"grad_norm": 0.34375, |
|
"learning_rate": 1.0699786392832201e-06, |
|
"loss": 1.7208, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.8672566371681416, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 1.0240421905689745e-06, |
|
"loss": 1.7368, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.870206489675516, |
|
"grad_norm": 0.345703125, |
|
"learning_rate": 9.79108496740244e-07, |
|
"loss": 1.5689, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.8731563421828907, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 9.351780130336441e-07, |
|
"loss": 1.5193, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.8761061946902657, |
|
"grad_norm": 0.3359375, |
|
"learning_rate": 8.922511845219971e-07, |
|
"loss": 1.672, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.87905604719764, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 8.503284461097604e-07, |
|
"loss": 1.637, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.882005899705015, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 8.094102225286837e-07, |
|
"loss": 1.6489, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.8849557522123894, |
|
"grad_norm": 0.3671875, |
|
"learning_rate": 7.694969283334575e-07, |
|
"loss": 1.5329, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.887905604719764, |
|
"grad_norm": 0.3046875, |
|
"learning_rate": 7.305889678975608e-07, |
|
"loss": 1.7028, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.8908554572271385, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 6.926867354091093e-07, |
|
"loss": 1.7493, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.893805309734513, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 6.557906148669024e-07, |
|
"loss": 1.7138, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.896755162241888, |
|
"grad_norm": 0.28125, |
|
"learning_rate": 6.199009800765265e-07, |
|
"loss": 1.5545, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.8997050147492627, |
|
"grad_norm": 0.283203125, |
|
"learning_rate": 5.850181946465361e-07, |
|
"loss": 1.5774, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.9026548672566372, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 5.51142611984834e-07, |
|
"loss": 1.6533, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.905604719764012, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 5.18274575295008e-07, |
|
"loss": 1.5782, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.9085545722713864, |
|
"grad_norm": 0.310546875, |
|
"learning_rate": 4.864144175729335e-07, |
|
"loss": 1.5866, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.911504424778761, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 4.555624616033427e-07, |
|
"loss": 1.7433, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.9144542772861355, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 4.2571901995659414e-07, |
|
"loss": 1.644, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.9174041297935105, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 3.96884394985475e-07, |
|
"loss": 1.8004, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.920353982300885, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 3.6905887882213717e-07, |
|
"loss": 1.5914, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.9233038348082596, |
|
"grad_norm": 0.328125, |
|
"learning_rate": 3.422427533751771e-07, |
|
"loss": 1.5159, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.926253687315634, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 3.1643629032674924e-07, |
|
"loss": 1.761, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.9292035398230087, |
|
"grad_norm": 0.314453125, |
|
"learning_rate": 2.916397511298019e-07, |
|
"loss": 1.4598, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.9321533923303837, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 2.678533870054567e-07, |
|
"loss": 1.6699, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.935103244837758, |
|
"grad_norm": 0.322265625, |
|
"learning_rate": 2.4507743894045533e-07, |
|
"loss": 1.6699, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.938053097345133, |
|
"grad_norm": 0.296875, |
|
"learning_rate": 2.2331213768468363e-07, |
|
"loss": 1.597, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.9410029498525074, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 2.0255770374890682e-07, |
|
"loss": 1.8973, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.943952802359882, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 1.8281434740247128e-07, |
|
"loss": 1.7737, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.9469026548672566, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 1.6408226867118403e-07, |
|
"loss": 1.64, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.949852507374631, |
|
"grad_norm": 0.3125, |
|
"learning_rate": 1.4636165733532546e-07, |
|
"loss": 1.611, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.949852507374631, |
|
"eval_loss": 1.6559995412826538, |
|
"eval_runtime": 31.6925, |
|
"eval_samples_per_second": 31.553, |
|
"eval_steps_per_second": 3.944, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.952802359882006, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 1.2965269292767313e-07, |
|
"loss": 1.6172, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.9557522123893807, |
|
"grad_norm": 0.37890625, |
|
"learning_rate": 1.1395554473171422e-07, |
|
"loss": 1.7035, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.9587020648967552, |
|
"grad_norm": 0.32421875, |
|
"learning_rate": 9.927037177993592e-08, |
|
"loss": 1.8667, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.96165191740413, |
|
"grad_norm": 0.294921875, |
|
"learning_rate": 8.559732285219335e-08, |
|
"loss": 1.5963, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.9646017699115044, |
|
"grad_norm": 0.3203125, |
|
"learning_rate": 7.293653647421073e-08, |
|
"loss": 1.7494, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.967551622418879, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 6.128814091619362e-08, |
|
"loss": 1.639, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.9705014749262535, |
|
"grad_norm": 0.275390625, |
|
"learning_rate": 5.0652254191496664e-08, |
|
"loss": 1.4368, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.9734513274336285, |
|
"grad_norm": 0.35546875, |
|
"learning_rate": 4.102898405545785e-08, |
|
"loss": 1.5194, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.976401179941003, |
|
"grad_norm": 0.318359375, |
|
"learning_rate": 3.241842800428829e-08, |
|
"loss": 1.7057, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.9793510324483776, |
|
"grad_norm": 0.416015625, |
|
"learning_rate": 2.482067327409521e-08, |
|
"loss": 1.6251, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.982300884955752, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 1.8235796839982665e-08, |
|
"loss": 1.6894, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.9852507374631267, |
|
"grad_norm": 0.341796875, |
|
"learning_rate": 1.266386541530773e-08, |
|
"loss": 1.6548, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.9882005899705013, |
|
"grad_norm": 0.29296875, |
|
"learning_rate": 8.104935450969908e-09, |
|
"loss": 1.6608, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.991150442477876, |
|
"grad_norm": 0.30078125, |
|
"learning_rate": 4.559053134822744e-09, |
|
"loss": 1.7403, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.994100294985251, |
|
"grad_norm": 0.326171875, |
|
"learning_rate": 2.0262543912741295e-09, |
|
"loss": 1.6677, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.9970501474926254, |
|
"grad_norm": 0.306640625, |
|
"learning_rate": 5.065648808533219e-10, |
|
"loss": 1.5926, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"grad_norm": 0.5, |
|
"learning_rate": 0.0, |
|
"loss": 1.8633, |
|
"step": 1017 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1017, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 200, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.426452217371034e+16, |
|
"train_batch_size": 24, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|