{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.1,
  "eval_steps": 1000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 31.81644058227539,
      "learning_rate": 8e-08,
      "loss": 1.5083,
      "step": 10
    },
    {
      "epoch": 0.0,
      "grad_norm": 16.249370574951172,
      "learning_rate": 1.7000000000000001e-07,
      "loss": 1.2724,
      "step": 20
    },
    {
      "epoch": 0.0,
      "grad_norm": 16.85301971435547,
      "learning_rate": 2.7e-07,
      "loss": 1.2571,
      "step": 30
    },
    {
      "epoch": 0.0,
      "grad_norm": 12.382726669311523,
      "learning_rate": 3.7e-07,
      "loss": 1.101,
      "step": 40
    },
    {
      "epoch": 0.0,
      "grad_norm": 43.00498962402344,
      "learning_rate": 4.6999999999999995e-07,
      "loss": 1.2847,
      "step": 50
    },
    {
      "epoch": 0.0,
      "grad_norm": 16.686534881591797,
      "learning_rate": 5.6e-07,
      "loss": 0.8594,
      "step": 60
    },
    {
      "epoch": 0.0,
      "grad_norm": 23.844993591308594,
      "learning_rate": 6.6e-07,
      "loss": 0.7645,
      "step": 70
    },
    {
      "epoch": 0.0,
      "grad_norm": 15.943971633911133,
      "learning_rate": 7.599999999999999e-07,
      "loss": 0.7681,
      "step": 80
    },
    {
      "epoch": 0.0,
      "grad_norm": 12.707146644592285,
      "learning_rate": 8.599999999999999e-07,
      "loss": 0.8728,
      "step": 90
    },
    {
      "epoch": 0.01,
      "grad_norm": 13.066338539123535,
      "learning_rate": 9.6e-07,
      "loss": 0.7508,
      "step": 100
    },
    {
      "epoch": 0.01,
      "grad_norm": 21.36861801147461,
      "learning_rate": 9.993939393939394e-07,
      "loss": 0.999,
      "step": 110
    },
    {
      "epoch": 0.01,
      "grad_norm": 23.451967239379883,
      "learning_rate": 9.983838383838383e-07,
      "loss": 0.8396,
      "step": 120
    },
    {
      "epoch": 0.01,
      "grad_norm": 14.662145614624023,
      "learning_rate": 9.973737373737373e-07,
      "loss": 1.0884,
      "step": 130
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.530464172363281,
      "learning_rate": 9.963636363636362e-07,
      "loss": 0.8505,
      "step": 140
    },
    {
      "epoch": 0.01,
      "grad_norm": 30.15620231628418,
      "learning_rate": 9.953535353535352e-07,
      "loss": 0.971,
      "step": 150
    },
    {
      "epoch": 0.01,
      "grad_norm": 43.50494384765625,
      "learning_rate": 9.943434343434343e-07,
      "loss": 0.9758,
      "step": 160
    },
    {
      "epoch": 0.01,
      "grad_norm": 18.704565048217773,
      "learning_rate": 9.933333333333333e-07,
      "loss": 1.1095,
      "step": 170
    },
    {
      "epoch": 0.01,
      "grad_norm": 51.077552795410156,
      "learning_rate": 9.923232323232322e-07,
      "loss": 1.0251,
      "step": 180
    },
    {
      "epoch": 0.01,
      "grad_norm": 32.405906677246094,
      "learning_rate": 9.913131313131314e-07,
      "loss": 0.8793,
      "step": 190
    },
    {
      "epoch": 0.01,
      "grad_norm": 35.2808952331543,
      "learning_rate": 9.903030303030303e-07,
      "loss": 0.6785,
      "step": 200
    },
    {
      "epoch": 0.01,
      "grad_norm": 17.738399505615234,
      "learning_rate": 9.892929292929293e-07,
      "loss": 0.8258,
      "step": 210
    },
    {
      "epoch": 0.01,
      "grad_norm": 8.202994346618652,
      "learning_rate": 9.882828282828282e-07,
      "loss": 0.8756,
      "step": 220
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.26487159729004,
      "learning_rate": 9.872727272727272e-07,
      "loss": 1.0467,
      "step": 230
    },
    {
      "epoch": 0.01,
      "grad_norm": 32.37431335449219,
      "learning_rate": 9.862626262626263e-07,
      "loss": 0.9119,
      "step": 240
    },
    {
      "epoch": 0.01,
      "grad_norm": 5.586151599884033,
      "learning_rate": 9.852525252525253e-07,
      "loss": 0.8556,
      "step": 250
    },
    {
      "epoch": 0.01,
      "grad_norm": 16.249475479125977,
      "learning_rate": 9.842424242424242e-07,
      "loss": 0.7945,
      "step": 260
    },
    {
      "epoch": 0.01,
      "grad_norm": 60.965965270996094,
      "learning_rate": 9.832323232323231e-07,
      "loss": 1.0668,
      "step": 270
    },
    {
      "epoch": 0.01,
      "grad_norm": 19.940616607666016,
      "learning_rate": 9.82222222222222e-07,
      "loss": 0.6501,
      "step": 280
    },
    {
      "epoch": 0.01,
      "grad_norm": 26.526460647583008,
      "learning_rate": 9.812121212121212e-07,
      "loss": 1.0423,
      "step": 290
    },
    {
      "epoch": 0.01,
      "grad_norm": 27.455795288085938,
      "learning_rate": 9.802020202020202e-07,
      "loss": 0.6693,
      "step": 300
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.113489151000977,
      "learning_rate": 9.791919191919191e-07,
      "loss": 0.4784,
      "step": 310
    },
    {
      "epoch": 0.02,
      "grad_norm": 29.75452423095703,
      "learning_rate": 9.78181818181818e-07,
      "loss": 0.5004,
      "step": 320
    },
    {
      "epoch": 0.02,
      "grad_norm": 24.82709503173828,
      "learning_rate": 9.77171717171717e-07,
      "loss": 0.8543,
      "step": 330
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.23027801513672,
      "learning_rate": 9.761616161616162e-07,
      "loss": 0.796,
      "step": 340
    },
    {
      "epoch": 0.02,
      "grad_norm": 9.593890190124512,
      "learning_rate": 9.751515151515151e-07,
      "loss": 0.672,
      "step": 350
    },
    {
      "epoch": 0.02,
      "grad_norm": 19.550216674804688,
      "learning_rate": 9.74141414141414e-07,
      "loss": 0.5915,
      "step": 360
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.101367950439453,
      "learning_rate": 9.731313131313132e-07,
      "loss": 0.8967,
      "step": 370
    },
    {
      "epoch": 0.02,
      "grad_norm": 6.973883152008057,
      "learning_rate": 9.721212121212122e-07,
      "loss": 0.7841,
      "step": 380
    },
    {
      "epoch": 0.02,
      "grad_norm": 18.076982498168945,
      "learning_rate": 9.711111111111111e-07,
      "loss": 0.7783,
      "step": 390
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.5223013162612915,
      "learning_rate": 9.7010101010101e-07,
      "loss": 0.8647,
      "step": 400
    },
    {
      "epoch": 0.02,
      "grad_norm": 14.444634437561035,
      "learning_rate": 9.69090909090909e-07,
      "loss": 0.6745,
      "step": 410
    },
    {
      "epoch": 0.02,
      "grad_norm": 41.90908432006836,
      "learning_rate": 9.680808080808082e-07,
      "loss": 1.0749,
      "step": 420
    },
    {
      "epoch": 0.02,
      "grad_norm": 27.912317276000977,
      "learning_rate": 9.67070707070707e-07,
      "loss": 0.5588,
      "step": 430
    },
    {
      "epoch": 0.02,
      "grad_norm": 22.31749725341797,
      "learning_rate": 9.66060606060606e-07,
      "loss": 0.7683,
      "step": 440
    },
    {
      "epoch": 0.02,
      "grad_norm": 57.88829040527344,
      "learning_rate": 9.65050505050505e-07,
      "loss": 0.5442,
      "step": 450
    },
    {
      "epoch": 0.02,
      "grad_norm": 43.25718688964844,
      "learning_rate": 9.64040404040404e-07,
      "loss": 0.8028,
      "step": 460
    },
    {
      "epoch": 0.02,
      "grad_norm": 21.001134872436523,
      "learning_rate": 9.630303030303029e-07,
      "loss": 0.7472,
      "step": 470
    },
    {
      "epoch": 0.02,
      "grad_norm": 13.919650077819824,
      "learning_rate": 9.62020202020202e-07,
      "loss": 0.5318,
      "step": 480
    },
    {
      "epoch": 0.02,
      "grad_norm": 26.845335006713867,
      "learning_rate": 9.61010101010101e-07,
      "loss": 1.012,
      "step": 490
    },
    {
      "epoch": 0.03,
      "grad_norm": 20.0955867767334,
      "learning_rate": 9.6e-07,
      "loss": 0.9174,
      "step": 500
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.6511812210083,
      "learning_rate": 9.589898989898989e-07,
      "loss": 0.691,
      "step": 510
    },
    {
      "epoch": 0.03,
      "grad_norm": 12.875068664550781,
      "learning_rate": 9.579797979797978e-07,
      "loss": 0.7781,
      "step": 520
    },
    {
      "epoch": 0.03,
      "grad_norm": 28.99612045288086,
      "learning_rate": 9.56969696969697e-07,
      "loss": 0.7791,
      "step": 530
    },
    {
      "epoch": 0.03,
      "grad_norm": 47.6656379699707,
      "learning_rate": 9.55959595959596e-07,
      "loss": 0.6166,
      "step": 540
    },
    {
      "epoch": 0.03,
      "grad_norm": 59.9251708984375,
      "learning_rate": 9.549494949494948e-07,
      "loss": 0.6836,
      "step": 550
    },
    {
      "epoch": 0.03,
      "grad_norm": 34.26139831542969,
      "learning_rate": 9.53939393939394e-07,
      "loss": 0.4317,
      "step": 560
    },
    {
      "epoch": 0.03,
      "grad_norm": 47.75581359863281,
      "learning_rate": 9.529292929292929e-07,
      "loss": 0.5685,
      "step": 570
    },
    {
      "epoch": 0.03,
      "grad_norm": 63.76834487915039,
      "learning_rate": 9.519191919191919e-07,
      "loss": 0.8805,
      "step": 580
    },
    {
      "epoch": 0.03,
      "grad_norm": 22.971323013305664,
      "learning_rate": 9.509090909090908e-07,
      "loss": 0.8103,
      "step": 590
    },
    {
      "epoch": 0.03,
      "grad_norm": 42.151161193847656,
      "learning_rate": 9.498989898989899e-07,
      "loss": 0.7563,
      "step": 600
    },
    {
      "epoch": 0.03,
      "grad_norm": 13.12915325164795,
      "learning_rate": 9.488888888888888e-07,
      "loss": 0.6844,
      "step": 610
    },
    {
      "epoch": 0.03,
      "grad_norm": 37.901973724365234,
      "learning_rate": 9.478787878787879e-07,
      "loss": 0.7467,
      "step": 620
    },
    {
      "epoch": 0.03,
      "grad_norm": 29.114883422851562,
      "learning_rate": 9.468686868686868e-07,
      "loss": 0.7759,
      "step": 630
    },
    {
      "epoch": 0.03,
      "grad_norm": 37.42153549194336,
      "learning_rate": 9.458585858585858e-07,
      "loss": 0.7221,
      "step": 640
    },
    {
      "epoch": 0.03,
      "grad_norm": 46.287559509277344,
      "learning_rate": 9.448484848484848e-07,
      "loss": 0.7219,
      "step": 650
    },
    {
      "epoch": 0.03,
      "grad_norm": 76.34626770019531,
      "learning_rate": 9.438383838383838e-07,
      "loss": 0.6938,
      "step": 660
    },
    {
      "epoch": 0.03,
      "grad_norm": 20.480300903320312,
      "learning_rate": 9.428282828282827e-07,
      "loss": 0.8135,
      "step": 670
    },
    {
      "epoch": 0.03,
      "grad_norm": 9.508841514587402,
      "learning_rate": 9.418181818181818e-07,
      "loss": 0.6024,
      "step": 680
    },
    {
      "epoch": 0.03,
      "grad_norm": 29.385066986083984,
      "learning_rate": 9.408080808080807e-07,
      "loss": 0.8891,
      "step": 690
    },
    {
      "epoch": 0.04,
      "grad_norm": 13.06400203704834,
      "learning_rate": 9.397979797979797e-07,
      "loss": 0.6723,
      "step": 700
    },
    {
      "epoch": 0.04,
      "grad_norm": 36.94926071166992,
      "learning_rate": 9.387878787878788e-07,
      "loss": 0.5682,
      "step": 710
    },
    {
      "epoch": 0.04,
      "grad_norm": 30.849035263061523,
      "learning_rate": 9.377777777777777e-07,
      "loss": 0.6693,
      "step": 720
    },
    {
      "epoch": 0.04,
      "grad_norm": 27.781005859375,
      "learning_rate": 9.367676767676768e-07,
      "loss": 0.7705,
      "step": 730
    },
    {
      "epoch": 0.04,
      "grad_norm": 5.268486976623535,
      "learning_rate": 9.357575757575757e-07,
      "loss": 0.7672,
      "step": 740
    },
    {
      "epoch": 0.04,
      "grad_norm": 70.79887390136719,
      "learning_rate": 9.347474747474747e-07,
      "loss": 0.7617,
      "step": 750
    },
    {
      "epoch": 0.04,
      "grad_norm": 71.6796875,
      "learning_rate": 9.337373737373737e-07,
      "loss": 0.5425,
      "step": 760
    },
    {
      "epoch": 0.04,
      "grad_norm": 21.205890655517578,
      "learning_rate": 9.327272727272727e-07,
      "loss": 0.6768,
      "step": 770
    },
    {
      "epoch": 0.04,
      "grad_norm": 38.276634216308594,
      "learning_rate": 9.317171717171717e-07,
      "loss": 0.8681,
      "step": 780
    },
    {
      "epoch": 0.04,
      "grad_norm": 17.904993057250977,
      "learning_rate": 9.307070707070707e-07,
      "loss": 0.7128,
      "step": 790
    },
    {
      "epoch": 0.04,
      "grad_norm": 32.66667938232422,
      "learning_rate": 9.296969696969696e-07,
      "loss": 0.8662,
      "step": 800
    },
    {
      "epoch": 0.04,
      "grad_norm": 7.020589828491211,
      "learning_rate": 9.286868686868687e-07,
      "loss": 0.7344,
      "step": 810
    },
    {
      "epoch": 0.04,
      "grad_norm": 33.822608947753906,
      "learning_rate": 9.276767676767676e-07,
      "loss": 0.7825,
      "step": 820
    },
    {
      "epoch": 0.04,
      "grad_norm": 21.147552490234375,
      "learning_rate": 9.266666666666665e-07,
      "loss": 0.7716,
      "step": 830
    },
    {
      "epoch": 0.04,
      "grad_norm": 22.41349983215332,
      "learning_rate": 9.256565656565656e-07,
      "loss": 0.4673,
      "step": 840
    },
    {
      "epoch": 0.04,
      "grad_norm": 24.813737869262695,
      "learning_rate": 9.246464646464645e-07,
      "loss": 0.7137,
      "step": 850
    },
    {
      "epoch": 0.04,
      "grad_norm": 31.02164649963379,
      "learning_rate": 9.236363636363636e-07,
      "loss": 1.0195,
      "step": 860
    },
    {
      "epoch": 0.04,
      "grad_norm": 19.22023582458496,
      "learning_rate": 9.226262626262625e-07,
      "loss": 0.8201,
      "step": 870
    },
    {
      "epoch": 0.04,
      "grad_norm": 21.087970733642578,
      "learning_rate": 9.216161616161616e-07,
      "loss": 0.6296,
      "step": 880
    },
    {
      "epoch": 0.04,
      "grad_norm": 8.970023155212402,
      "learning_rate": 9.206060606060606e-07,
      "loss": 0.3712,
      "step": 890
    },
    {
      "epoch": 0.04,
      "grad_norm": 28.498889923095703,
      "learning_rate": 9.195959595959596e-07,
      "loss": 0.7953,
      "step": 900
    },
    {
      "epoch": 0.05,
      "grad_norm": 21.259479522705078,
      "learning_rate": 9.185858585858585e-07,
      "loss": 0.7346,
      "step": 910
    },
    {
      "epoch": 0.05,
      "grad_norm": 13.871064186096191,
      "learning_rate": 9.175757575757576e-07,
      "loss": 0.6644,
      "step": 920
    },
    {
      "epoch": 0.05,
      "grad_norm": 44.91512680053711,
      "learning_rate": 9.165656565656565e-07,
      "loss": 0.5467,
      "step": 930
    },
    {
      "epoch": 0.05,
      "grad_norm": 11.775423049926758,
      "learning_rate": 9.155555555555556e-07,
      "loss": 0.5149,
      "step": 940
    },
    {
      "epoch": 0.05,
      "grad_norm": 35.62663650512695,
      "learning_rate": 9.145454545454545e-07,
      "loss": 0.6002,
      "step": 950
    },
    {
      "epoch": 0.05,
      "grad_norm": 32.12046813964844,
      "learning_rate": 9.135353535353535e-07,
      "loss": 0.8875,
      "step": 960
    },
    {
      "epoch": 0.05,
      "grad_norm": 37.11963653564453,
      "learning_rate": 9.126262626262626e-07,
      "loss": 0.9515,
      "step": 970
    },
    {
      "epoch": 0.05,
      "grad_norm": 21.641096115112305,
      "learning_rate": 9.116161616161616e-07,
      "loss": 0.7849,
      "step": 980
    },
    {
      "epoch": 0.05,
      "grad_norm": 29.253921508789062,
      "learning_rate": 9.106060606060606e-07,
      "loss": 0.5926,
      "step": 990
    },
    {
      "epoch": 0.05,
      "grad_norm": 27.01972198486328,
      "learning_rate": 9.095959595959596e-07,
      "loss": 0.7061,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "eval_loss": 0.725902259349823,
      "eval_runtime": 274.8847,
      "eval_samples_per_second": 3.638,
      "eval_steps_per_second": 3.638,
      "step": 1000
    },
    {
      "epoch": 0.05,
      "grad_norm": 16.84354019165039,
      "learning_rate": 9.085858585858586e-07,
      "loss": 0.6186,
      "step": 1010
    },
    {
      "epoch": 0.05,
      "grad_norm": 36.384700775146484,
      "learning_rate": 9.075757575757576e-07,
      "loss": 0.8362,
      "step": 1020
    },
    {
      "epoch": 0.05,
      "grad_norm": 37.640892028808594,
      "learning_rate": 9.065656565656565e-07,
      "loss": 0.7901,
      "step": 1030
    },
    {
      "epoch": 0.05,
      "grad_norm": 44.735076904296875,
      "learning_rate": 9.055555555555556e-07,
      "loss": 0.5116,
      "step": 1040
    },
    {
      "epoch": 0.05,
      "grad_norm": 46.5770263671875,
      "learning_rate": 9.045454545454545e-07,
      "loss": 0.7274,
      "step": 1050
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.10406364500522614,
      "learning_rate": 9.035353535353534e-07,
      "loss": 0.4903,
      "step": 1060
    },
    {
      "epoch": 0.05,
      "grad_norm": 18.263145446777344,
      "learning_rate": 9.025252525252525e-07,
      "loss": 0.7371,
      "step": 1070
    },
    {
      "epoch": 0.05,
      "grad_norm": 33.42209243774414,
      "learning_rate": 9.015151515151514e-07,
      "loss": 0.8111,
      "step": 1080
    },
    {
      "epoch": 0.05,
      "grad_norm": 48.237525939941406,
      "learning_rate": 9.005050505050504e-07,
      "loss": 0.6233,
      "step": 1090
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.5421327352523804,
      "learning_rate": 8.994949494949494e-07,
      "loss": 0.6609,
      "step": 1100
    },
    {
      "epoch": 0.06,
      "grad_norm": 29.761442184448242,
      "learning_rate": 8.984848484848484e-07,
      "loss": 0.4407,
      "step": 1110
    },
    {
      "epoch": 0.06,
      "grad_norm": 31.435007095336914,
      "learning_rate": 8.974747474747474e-07,
      "loss": 0.7207,
      "step": 1120
    },
    {
      "epoch": 0.06,
      "grad_norm": 28.20315170288086,
      "learning_rate": 8.964646464646465e-07,
      "loss": 0.6879,
      "step": 1130
    },
    {
      "epoch": 0.06,
      "grad_norm": 19.431041717529297,
      "learning_rate": 8.954545454545454e-07,
      "loss": 0.7292,
      "step": 1140
    },
    {
      "epoch": 0.06,
      "grad_norm": 40.8221435546875,
      "learning_rate": 8.944444444444445e-07,
      "loss": 0.9112,
      "step": 1150
    },
    {
      "epoch": 0.06,
      "grad_norm": 4.42428731918335,
      "learning_rate": 8.934343434343434e-07,
      "loss": 0.484,
      "step": 1160
    },
    {
      "epoch": 0.06,
      "grad_norm": 24.146841049194336,
      "learning_rate": 8.924242424242425e-07,
      "loss": 0.4881,
      "step": 1170
    },
    {
      "epoch": 0.06,
      "grad_norm": 52.02534866333008,
      "learning_rate": 8.914141414141414e-07,
      "loss": 0.8042,
      "step": 1180
    },
    {
      "epoch": 0.06,
      "grad_norm": 26.81230926513672,
      "learning_rate": 8.904040404040404e-07,
      "loss": 0.6995,
      "step": 1190
    },
    {
      "epoch": 0.06,
      "grad_norm": 9.775701522827148,
      "learning_rate": 8.893939393939394e-07,
      "loss": 0.8203,
      "step": 1200
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.65981674194336,
      "learning_rate": 8.883838383838383e-07,
      "loss": 0.9154,
      "step": 1210
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.086986541748047,
      "learning_rate": 8.873737373737373e-07,
      "loss": 0.4731,
      "step": 1220
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.6555233001709,
      "learning_rate": 8.863636363636363e-07,
      "loss": 0.7798,
      "step": 1230
    },
    {
      "epoch": 0.06,
      "grad_norm": 13.60240364074707,
      "learning_rate": 8.853535353535353e-07,
      "loss": 0.6382,
      "step": 1240
    },
    {
      "epoch": 0.06,
      "grad_norm": 20.39061737060547,
      "learning_rate": 8.843434343434343e-07,
      "loss": 0.6077,
      "step": 1250
    },
    {
      "epoch": 0.06,
      "grad_norm": 14.251206398010254,
      "learning_rate": 8.833333333333333e-07,
      "loss": 0.5736,
      "step": 1260
    },
    {
      "epoch": 0.06,
      "grad_norm": 63.768611907958984,
      "learning_rate": 8.823232323232322e-07,
      "loss": 0.7059,
      "step": 1270
    },
    {
      "epoch": 0.06,
      "grad_norm": 44.81045150756836,
      "learning_rate": 8.813131313131313e-07,
      "loss": 0.6352,
      "step": 1280
    },
    {
      "epoch": 0.06,
      "grad_norm": 15.952017784118652,
      "learning_rate": 8.803030303030302e-07,
      "loss": 0.7523,
      "step": 1290
    },
    {
      "epoch": 0.07,
      "grad_norm": 27.234148025512695,
      "learning_rate": 8.792929292929293e-07,
      "loss": 0.4924,
      "step": 1300
    },
    {
      "epoch": 0.07,
      "grad_norm": 44.29439163208008,
      "learning_rate": 8.782828282828283e-07,
      "loss": 0.6422,
      "step": 1310
    },
    {
      "epoch": 0.07,
      "grad_norm": 35.106658935546875,
      "learning_rate": 8.772727272727273e-07,
      "loss": 0.655,
      "step": 1320
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.1618399620056152,
      "learning_rate": 8.762626262626263e-07,
      "loss": 0.6463,
      "step": 1330
    },
    {
      "epoch": 0.07,
      "grad_norm": 36.30659866333008,
      "learning_rate": 8.752525252525253e-07,
      "loss": 0.5618,
      "step": 1340
    },
    {
      "epoch": 0.07,
      "grad_norm": 18.705991744995117,
      "learning_rate": 8.742424242424242e-07,
      "loss": 0.5887,
      "step": 1350
    },
    {
      "epoch": 0.07,
      "grad_norm": 2.828080654144287,
      "learning_rate": 8.732323232323232e-07,
      "loss": 0.6611,
      "step": 1360
    },
    {
      "epoch": 0.07,
      "grad_norm": 47.979820251464844,
      "learning_rate": 8.722222222222222e-07,
      "loss": 0.4975,
      "step": 1370
    },
    {
      "epoch": 0.07,
      "grad_norm": 25.263946533203125,
      "learning_rate": 8.712121212121211e-07,
      "loss": 0.761,
      "step": 1380
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.21978527307510376,
      "learning_rate": 8.702020202020202e-07,
      "loss": 0.5013,
      "step": 1390
    },
    {
      "epoch": 0.07,
      "grad_norm": 15.600090026855469,
      "learning_rate": 8.691919191919191e-07,
      "loss": 0.5375,
      "step": 1400
    },
    {
      "epoch": 0.07,
      "grad_norm": 25.815698623657227,
      "learning_rate": 8.681818181818182e-07,
      "loss": 0.8176,
      "step": 1410
    },
    {
      "epoch": 0.07,
      "grad_norm": 44.83120346069336,
      "learning_rate": 8.671717171717171e-07,
      "loss": 0.5207,
      "step": 1420
    },
    {
      "epoch": 0.07,
      "grad_norm": 20.984037399291992,
      "learning_rate": 8.661616161616161e-07,
      "loss": 0.5024,
      "step": 1430
    },
    {
      "epoch": 0.07,
      "grad_norm": 26.290699005126953,
      "learning_rate": 8.651515151515151e-07,
      "loss": 0.7017,
      "step": 1440
    },
    {
      "epoch": 0.07,
      "grad_norm": 23.3577880859375,
      "learning_rate": 8.641414141414141e-07,
      "loss": 0.6419,
      "step": 1450
    },
    {
      "epoch": 0.07,
      "grad_norm": 21.74049949645996,
      "learning_rate": 8.63131313131313e-07,
      "loss": 0.5426,
      "step": 1460
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.5753927230834961,
      "learning_rate": 8.62121212121212e-07,
      "loss": 0.6417,
      "step": 1470
    },
    {
      "epoch": 0.07,
      "grad_norm": 36.96406936645508,
      "learning_rate": 8.611111111111111e-07,
      "loss": 0.6999,
      "step": 1480
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.05758470296859741,
      "learning_rate": 8.601010101010102e-07,
      "loss": 0.816,
      "step": 1490
    },
    {
      "epoch": 0.07,
      "grad_norm": 51.20552062988281,
      "learning_rate": 8.590909090909091e-07,
      "loss": 0.7171,
      "step": 1500
    },
    {
      "epoch": 0.08,
      "grad_norm": 34.43236541748047,
      "learning_rate": 8.58080808080808e-07,
      "loss": 0.7448,
      "step": 1510
    },
    {
      "epoch": 0.08,
      "grad_norm": 89.9273681640625,
      "learning_rate": 8.570707070707071e-07,
      "loss": 0.6324,
      "step": 1520
    },
    {
      "epoch": 0.08,
      "grad_norm": 9.697078704833984,
      "learning_rate": 8.56060606060606e-07,
      "loss": 0.7951,
      "step": 1530
    },
    {
      "epoch": 0.08,
      "grad_norm": 28.903240203857422,
      "learning_rate": 8.55050505050505e-07,
      "loss": 0.6224,
      "step": 1540
    },
    {
      "epoch": 0.08,
      "grad_norm": 19.746826171875,
      "learning_rate": 8.54040404040404e-07,
      "loss": 0.4143,
      "step": 1550
    },
    {
      "epoch": 0.08,
      "grad_norm": 33.706146240234375,
      "learning_rate": 8.53030303030303e-07,
      "loss": 0.6803,
      "step": 1560
    },
    {
      "epoch": 0.08,
      "grad_norm": 24.144351959228516,
      "learning_rate": 8.52020202020202e-07,
      "loss": 0.6015,
      "step": 1570
    },
    {
      "epoch": 0.08,
      "grad_norm": 34.93477249145508,
      "learning_rate": 8.51010101010101e-07,
      "loss": 0.5599,
      "step": 1580
    },
    {
      "epoch": 0.08,
      "grad_norm": 31.863859176635742,
      "learning_rate": 8.499999999999999e-07,
      "loss": 0.7127,
      "step": 1590
    },
    {
      "epoch": 0.08,
      "grad_norm": 48.534549713134766,
      "learning_rate": 8.48989898989899e-07,
      "loss": 0.7777,
      "step": 1600
    },
    {
      "epoch": 0.08,
      "grad_norm": 35.08165740966797,
      "learning_rate": 8.479797979797979e-07,
      "loss": 0.6571,
      "step": 1610
    },
    {
      "epoch": 0.08,
      "grad_norm": 33.532325744628906,
      "learning_rate": 8.469696969696968e-07,
      "loss": 0.7386,
      "step": 1620
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.014142200350761414,
      "learning_rate": 8.459595959595959e-07,
      "loss": 0.5505,
      "step": 1630
    },
    {
      "epoch": 0.08,
      "grad_norm": 29.99919319152832,
      "learning_rate": 8.449494949494948e-07,
      "loss": 0.4274,
      "step": 1640
    },
    {
      "epoch": 0.08,
      "grad_norm": 13.206497192382812,
      "learning_rate": 8.43939393939394e-07,
      "loss": 0.4399,
      "step": 1650
    },
    {
      "epoch": 0.08,
      "grad_norm": 17.408052444458008,
      "learning_rate": 8.429292929292929e-07,
      "loss": 0.5262,
      "step": 1660
    },
    {
      "epoch": 0.08,
      "grad_norm": 14.05802059173584,
      "learning_rate": 8.419191919191919e-07,
      "loss": 0.5503,
      "step": 1670
    },
    {
      "epoch": 0.08,
      "grad_norm": 13.401651382446289,
      "learning_rate": 8.409090909090909e-07,
      "loss": 0.813,
      "step": 1680
    },
    {
      "epoch": 0.08,
      "grad_norm": 28.40484619140625,
      "learning_rate": 8.398989898989899e-07,
      "loss": 0.7321,
      "step": 1690
    },
    {
      "epoch": 0.09,
      "grad_norm": 39.133670806884766,
      "learning_rate": 8.388888888888888e-07,
      "loss": 0.7129,
      "step": 1700
    },
    {
      "epoch": 0.09,
      "grad_norm": 26.99220848083496,
      "learning_rate": 8.378787878787879e-07,
      "loss": 0.645,
      "step": 1710
    },
    {
      "epoch": 0.09,
      "grad_norm": 22.875778198242188,
      "learning_rate": 8.368686868686868e-07,
      "loss": 0.6779,
      "step": 1720
    },
    {
      "epoch": 0.09,
      "grad_norm": 37.771705627441406,
      "learning_rate": 8.358585858585859e-07,
      "loss": 0.6104,
      "step": 1730
    },
    {
      "epoch": 0.09,
      "grad_norm": 17.48222541809082,
      "learning_rate": 8.348484848484848e-07,
      "loss": 0.5982,
      "step": 1740
    },
    {
      "epoch": 0.09,
      "grad_norm": 13.396077156066895,
      "learning_rate": 8.338383838383838e-07,
      "loss": 0.6295,
      "step": 1750
    },
    {
      "epoch": 0.09,
      "grad_norm": 48.97739791870117,
      "learning_rate": 8.328282828282828e-07,
      "loss": 0.8439,
      "step": 1760
    },
    {
      "epoch": 0.09,
      "grad_norm": 74.33853149414062,
      "learning_rate": 8.318181818181817e-07,
      "loss": 1.0566,
      "step": 1770
    },
    {
      "epoch": 0.09,
      "grad_norm": 7.2910051345825195,
      "learning_rate": 8.308080808080807e-07,
      "loss": 0.5226,
      "step": 1780
    },
    {
      "epoch": 0.09,
      "grad_norm": 27.389278411865234,
      "learning_rate": 8.297979797979797e-07,
      "loss": 1.1226,
      "step": 1790
    },
    {
      "epoch": 0.09,
      "grad_norm": 9.284111976623535,
      "learning_rate": 8.287878787878787e-07,
      "loss": 0.4101,
      "step": 1800
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.962563514709473,
      "learning_rate": 8.277777777777777e-07,
      "loss": 0.5349,
      "step": 1810
    },
    {
      "epoch": 0.09,
      "grad_norm": 47.56216049194336,
      "learning_rate": 8.267676767676768e-07,
      "loss": 0.6863,
      "step": 1820
    },
    {
      "epoch": 0.09,
      "grad_norm": 31.547109603881836,
      "learning_rate": 8.257575757575757e-07,
      "loss": 0.7484,
      "step": 1830
    },
    {
      "epoch": 0.09,
      "grad_norm": 8.890266418457031,
      "learning_rate": 8.247474747474748e-07,
      "loss": 0.7518,
      "step": 1840
    },
    {
      "epoch": 0.09,
      "grad_norm": 25.44080352783203,
      "learning_rate": 8.237373737373737e-07,
      "loss": 0.8552,
      "step": 1850
    },
    {
      "epoch": 0.09,
      "grad_norm": 19.411556243896484,
      "learning_rate": 8.227272727272727e-07,
      "loss": 0.6285,
      "step": 1860
    },
    {
      "epoch": 0.09,
      "grad_norm": 5.5483574867248535,
      "learning_rate": 8.217171717171717e-07,
      "loss": 0.5478,
      "step": 1870
    },
    {
      "epoch": 0.09,
      "grad_norm": 22.145336151123047,
      "learning_rate": 8.207070707070707e-07,
      "loss": 0.7499,
      "step": 1880
    },
    {
      "epoch": 0.09,
      "grad_norm": 30.043731689453125,
      "learning_rate": 8.196969696969697e-07,
      "loss": 0.7252,
      "step": 1890
    },
    {
      "epoch": 0.1,
      "grad_norm": 39.9935417175293,
      "learning_rate": 8.186868686868687e-07,
      "loss": 0.7348,
      "step": 1900
    },
    {
      "epoch": 0.1,
      "grad_norm": 5.8807525634765625,
      "learning_rate": 8.176767676767676e-07,
      "loss": 0.381,
      "step": 1910
    },
    {
      "epoch": 0.1,
      "grad_norm": 16.082597732543945,
      "learning_rate": 8.166666666666666e-07,
      "loss": 0.6413,
      "step": 1920
    },
    {
      "epoch": 0.1,
      "grad_norm": 15.713370323181152,
      "learning_rate": 8.156565656565656e-07,
      "loss": 0.4765,
      "step": 1930
    },
    {
      "epoch": 0.1,
      "grad_norm": 43.45647048950195,
      "learning_rate": 8.146464646464645e-07,
      "loss": 0.6653,
      "step": 1940
    },
    {
      "epoch": 0.1,
      "grad_norm": 51.137046813964844,
      "learning_rate": 8.136363636363636e-07,
      "loss": 0.6915,
      "step": 1950
    },
    {
      "epoch": 0.1,
      "grad_norm": 33.2936897277832,
      "learning_rate": 8.126262626262625e-07,
      "loss": 0.4993,
      "step": 1960
    },
    {
      "epoch": 0.1,
      "grad_norm": 10.87304973602295,
      "learning_rate": 8.116161616161616e-07,
      "loss": 0.7893,
      "step": 1970
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.774446427822113,
      "learning_rate": 8.106060606060605e-07,
      "loss": 0.5946,
      "step": 1980
    },
    {
      "epoch": 0.1,
      "grad_norm": 26.221094131469727,
      "learning_rate": 8.095959595959596e-07,
      "loss": 0.8534,
      "step": 1990
    },
    {
      "epoch": 0.1,
      "grad_norm": 46.51724624633789,
      "learning_rate": 8.085858585858586e-07,
      "loss": 0.4862,
      "step": 2000
    },
    {
      "epoch": 0.1,
      "eval_loss": 0.6236868500709534,
      "eval_runtime": 274.2428,
      "eval_samples_per_second": 3.646,
      "eval_steps_per_second": 3.646,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 10000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 2000,
  "total_flos": 9.4282098671616e+16,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|