{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.4,
"eval_steps": 1000,
"global_step": 8000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0,
"grad_norm": 31.81644058227539,
"learning_rate": 8e-08,
"loss": 1.5083,
"step": 10
},
{
"epoch": 0.0,
"grad_norm": 16.249370574951172,
"learning_rate": 1.7000000000000001e-07,
"loss": 1.2724,
"step": 20
},
{
"epoch": 0.0,
"grad_norm": 16.85301971435547,
"learning_rate": 2.7e-07,
"loss": 1.2571,
"step": 30
},
{
"epoch": 0.0,
"grad_norm": 12.382726669311523,
"learning_rate": 3.7e-07,
"loss": 1.101,
"step": 40
},
{
"epoch": 0.0,
"grad_norm": 43.00498962402344,
"learning_rate": 4.6999999999999995e-07,
"loss": 1.2847,
"step": 50
},
{
"epoch": 0.0,
"grad_norm": 16.686534881591797,
"learning_rate": 5.6e-07,
"loss": 0.8594,
"step": 60
},
{
"epoch": 0.0,
"grad_norm": 23.844993591308594,
"learning_rate": 6.6e-07,
"loss": 0.7645,
"step": 70
},
{
"epoch": 0.0,
"grad_norm": 15.943971633911133,
"learning_rate": 7.599999999999999e-07,
"loss": 0.7681,
"step": 80
},
{
"epoch": 0.0,
"grad_norm": 12.707146644592285,
"learning_rate": 8.599999999999999e-07,
"loss": 0.8728,
"step": 90
},
{
"epoch": 0.01,
"grad_norm": 13.066338539123535,
"learning_rate": 9.6e-07,
"loss": 0.7508,
"step": 100
},
{
"epoch": 0.01,
"grad_norm": 21.36861801147461,
"learning_rate": 9.993939393939394e-07,
"loss": 0.999,
"step": 110
},
{
"epoch": 0.01,
"grad_norm": 23.451967239379883,
"learning_rate": 9.983838383838383e-07,
"loss": 0.8396,
"step": 120
},
{
"epoch": 0.01,
"grad_norm": 14.662145614624023,
"learning_rate": 9.973737373737373e-07,
"loss": 1.0884,
"step": 130
},
{
"epoch": 0.01,
"grad_norm": 9.530464172363281,
"learning_rate": 9.963636363636362e-07,
"loss": 0.8505,
"step": 140
},
{
"epoch": 0.01,
"grad_norm": 30.15620231628418,
"learning_rate": 9.953535353535352e-07,
"loss": 0.971,
"step": 150
},
{
"epoch": 0.01,
"grad_norm": 43.50494384765625,
"learning_rate": 9.943434343434343e-07,
"loss": 0.9758,
"step": 160
},
{
"epoch": 0.01,
"grad_norm": 18.704565048217773,
"learning_rate": 9.933333333333333e-07,
"loss": 1.1095,
"step": 170
},
{
"epoch": 0.01,
"grad_norm": 51.077552795410156,
"learning_rate": 9.923232323232322e-07,
"loss": 1.0251,
"step": 180
},
{
"epoch": 0.01,
"grad_norm": 32.405906677246094,
"learning_rate": 9.913131313131314e-07,
"loss": 0.8793,
"step": 190
},
{
"epoch": 0.01,
"grad_norm": 35.2808952331543,
"learning_rate": 9.903030303030303e-07,
"loss": 0.6785,
"step": 200
},
{
"epoch": 0.01,
"grad_norm": 17.738399505615234,
"learning_rate": 9.892929292929293e-07,
"loss": 0.8258,
"step": 210
},
{
"epoch": 0.01,
"grad_norm": 8.202994346618652,
"learning_rate": 9.882828282828282e-07,
"loss": 0.8756,
"step": 220
},
{
"epoch": 0.01,
"grad_norm": 16.26487159729004,
"learning_rate": 9.872727272727272e-07,
"loss": 1.0467,
"step": 230
},
{
"epoch": 0.01,
"grad_norm": 32.37431335449219,
"learning_rate": 9.862626262626263e-07,
"loss": 0.9119,
"step": 240
},
{
"epoch": 0.01,
"grad_norm": 5.586151599884033,
"learning_rate": 9.852525252525253e-07,
"loss": 0.8556,
"step": 250
},
{
"epoch": 0.01,
"grad_norm": 16.249475479125977,
"learning_rate": 9.842424242424242e-07,
"loss": 0.7945,
"step": 260
},
{
"epoch": 0.01,
"grad_norm": 60.965965270996094,
"learning_rate": 9.832323232323231e-07,
"loss": 1.0668,
"step": 270
},
{
"epoch": 0.01,
"grad_norm": 19.940616607666016,
"learning_rate": 9.82222222222222e-07,
"loss": 0.6501,
"step": 280
},
{
"epoch": 0.01,
"grad_norm": 26.526460647583008,
"learning_rate": 9.812121212121212e-07,
"loss": 1.0423,
"step": 290
},
{
"epoch": 0.01,
"grad_norm": 27.455795288085938,
"learning_rate": 9.802020202020202e-07,
"loss": 0.6693,
"step": 300
},
{
"epoch": 0.02,
"grad_norm": 18.113489151000977,
"learning_rate": 9.791919191919191e-07,
"loss": 0.4784,
"step": 310
},
{
"epoch": 0.02,
"grad_norm": 29.75452423095703,
"learning_rate": 9.78181818181818e-07,
"loss": 0.5004,
"step": 320
},
{
"epoch": 0.02,
"grad_norm": 24.82709503173828,
"learning_rate": 9.77171717171717e-07,
"loss": 0.8543,
"step": 330
},
{
"epoch": 0.02,
"grad_norm": 22.23027801513672,
"learning_rate": 9.761616161616162e-07,
"loss": 0.796,
"step": 340
},
{
"epoch": 0.02,
"grad_norm": 9.593890190124512,
"learning_rate": 9.751515151515151e-07,
"loss": 0.672,
"step": 350
},
{
"epoch": 0.02,
"grad_norm": 19.550216674804688,
"learning_rate": 9.74141414141414e-07,
"loss": 0.5915,
"step": 360
},
{
"epoch": 0.02,
"grad_norm": 18.101367950439453,
"learning_rate": 9.731313131313132e-07,
"loss": 0.8967,
"step": 370
},
{
"epoch": 0.02,
"grad_norm": 6.973883152008057,
"learning_rate": 9.721212121212122e-07,
"loss": 0.7841,
"step": 380
},
{
"epoch": 0.02,
"grad_norm": 18.076982498168945,
"learning_rate": 9.711111111111111e-07,
"loss": 0.7783,
"step": 390
},
{
"epoch": 0.02,
"grad_norm": 1.5223013162612915,
"learning_rate": 9.7010101010101e-07,
"loss": 0.8647,
"step": 400
},
{
"epoch": 0.02,
"grad_norm": 14.444634437561035,
"learning_rate": 9.69090909090909e-07,
"loss": 0.6745,
"step": 410
},
{
"epoch": 0.02,
"grad_norm": 41.90908432006836,
"learning_rate": 9.680808080808082e-07,
"loss": 1.0749,
"step": 420
},
{
"epoch": 0.02,
"grad_norm": 27.912317276000977,
"learning_rate": 9.67070707070707e-07,
"loss": 0.5588,
"step": 430
},
{
"epoch": 0.02,
"grad_norm": 22.31749725341797,
"learning_rate": 9.66060606060606e-07,
"loss": 0.7683,
"step": 440
},
{
"epoch": 0.02,
"grad_norm": 57.88829040527344,
"learning_rate": 9.65050505050505e-07,
"loss": 0.5442,
"step": 450
},
{
"epoch": 0.02,
"grad_norm": 43.25718688964844,
"learning_rate": 9.64040404040404e-07,
"loss": 0.8028,
"step": 460
},
{
"epoch": 0.02,
"grad_norm": 21.001134872436523,
"learning_rate": 9.630303030303029e-07,
"loss": 0.7472,
"step": 470
},
{
"epoch": 0.02,
"grad_norm": 13.919650077819824,
"learning_rate": 9.62020202020202e-07,
"loss": 0.5318,
"step": 480
},
{
"epoch": 0.02,
"grad_norm": 26.845335006713867,
"learning_rate": 9.61010101010101e-07,
"loss": 1.012,
"step": 490
},
{
"epoch": 0.03,
"grad_norm": 20.0955867767334,
"learning_rate": 9.6e-07,
"loss": 0.9174,
"step": 500
},
{
"epoch": 0.03,
"grad_norm": 9.6511812210083,
"learning_rate": 9.589898989898989e-07,
"loss": 0.691,
"step": 510
},
{
"epoch": 0.03,
"grad_norm": 12.875068664550781,
"learning_rate": 9.579797979797978e-07,
"loss": 0.7781,
"step": 520
},
{
"epoch": 0.03,
"grad_norm": 28.99612045288086,
"learning_rate": 9.56969696969697e-07,
"loss": 0.7791,
"step": 530
},
{
"epoch": 0.03,
"grad_norm": 47.6656379699707,
"learning_rate": 9.55959595959596e-07,
"loss": 0.6166,
"step": 540
},
{
"epoch": 0.03,
"grad_norm": 59.9251708984375,
"learning_rate": 9.549494949494948e-07,
"loss": 0.6836,
"step": 550
},
{
"epoch": 0.03,
"grad_norm": 34.26139831542969,
"learning_rate": 9.53939393939394e-07,
"loss": 0.4317,
"step": 560
},
{
"epoch": 0.03,
"grad_norm": 47.75581359863281,
"learning_rate": 9.529292929292929e-07,
"loss": 0.5685,
"step": 570
},
{
"epoch": 0.03,
"grad_norm": 63.76834487915039,
"learning_rate": 9.519191919191919e-07,
"loss": 0.8805,
"step": 580
},
{
"epoch": 0.03,
"grad_norm": 22.971323013305664,
"learning_rate": 9.509090909090908e-07,
"loss": 0.8103,
"step": 590
},
{
"epoch": 0.03,
"grad_norm": 42.151161193847656,
"learning_rate": 9.498989898989899e-07,
"loss": 0.7563,
"step": 600
},
{
"epoch": 0.03,
"grad_norm": 13.12915325164795,
"learning_rate": 9.488888888888888e-07,
"loss": 0.6844,
"step": 610
},
{
"epoch": 0.03,
"grad_norm": 37.901973724365234,
"learning_rate": 9.478787878787879e-07,
"loss": 0.7467,
"step": 620
},
{
"epoch": 0.03,
"grad_norm": 29.114883422851562,
"learning_rate": 9.468686868686868e-07,
"loss": 0.7759,
"step": 630
},
{
"epoch": 0.03,
"grad_norm": 37.42153549194336,
"learning_rate": 9.458585858585858e-07,
"loss": 0.7221,
"step": 640
},
{
"epoch": 0.03,
"grad_norm": 46.287559509277344,
"learning_rate": 9.448484848484848e-07,
"loss": 0.7219,
"step": 650
},
{
"epoch": 0.03,
"grad_norm": 76.34626770019531,
"learning_rate": 9.438383838383838e-07,
"loss": 0.6938,
"step": 660
},
{
"epoch": 0.03,
"grad_norm": 20.480300903320312,
"learning_rate": 9.428282828282827e-07,
"loss": 0.8135,
"step": 670
},
{
"epoch": 0.03,
"grad_norm": 9.508841514587402,
"learning_rate": 9.418181818181818e-07,
"loss": 0.6024,
"step": 680
},
{
"epoch": 0.03,
"grad_norm": 29.385066986083984,
"learning_rate": 9.408080808080807e-07,
"loss": 0.8891,
"step": 690
},
{
"epoch": 0.04,
"grad_norm": 13.06400203704834,
"learning_rate": 9.397979797979797e-07,
"loss": 0.6723,
"step": 700
},
{
"epoch": 0.04,
"grad_norm": 36.94926071166992,
"learning_rate": 9.387878787878788e-07,
"loss": 0.5682,
"step": 710
},
{
"epoch": 0.04,
"grad_norm": 30.849035263061523,
"learning_rate": 9.377777777777777e-07,
"loss": 0.6693,
"step": 720
},
{
"epoch": 0.04,
"grad_norm": 27.781005859375,
"learning_rate": 9.367676767676768e-07,
"loss": 0.7705,
"step": 730
},
{
"epoch": 0.04,
"grad_norm": 5.268486976623535,
"learning_rate": 9.357575757575757e-07,
"loss": 0.7672,
"step": 740
},
{
"epoch": 0.04,
"grad_norm": 70.79887390136719,
"learning_rate": 9.347474747474747e-07,
"loss": 0.7617,
"step": 750
},
{
"epoch": 0.04,
"grad_norm": 71.6796875,
"learning_rate": 9.337373737373737e-07,
"loss": 0.5425,
"step": 760
},
{
"epoch": 0.04,
"grad_norm": 21.205890655517578,
"learning_rate": 9.327272727272727e-07,
"loss": 0.6768,
"step": 770
},
{
"epoch": 0.04,
"grad_norm": 38.276634216308594,
"learning_rate": 9.317171717171717e-07,
"loss": 0.8681,
"step": 780
},
{
"epoch": 0.04,
"grad_norm": 17.904993057250977,
"learning_rate": 9.307070707070707e-07,
"loss": 0.7128,
"step": 790
},
{
"epoch": 0.04,
"grad_norm": 32.66667938232422,
"learning_rate": 9.296969696969696e-07,
"loss": 0.8662,
"step": 800
},
{
"epoch": 0.04,
"grad_norm": 7.020589828491211,
"learning_rate": 9.286868686868687e-07,
"loss": 0.7344,
"step": 810
},
{
"epoch": 0.04,
"grad_norm": 33.822608947753906,
"learning_rate": 9.276767676767676e-07,
"loss": 0.7825,
"step": 820
},
{
"epoch": 0.04,
"grad_norm": 21.147552490234375,
"learning_rate": 9.266666666666665e-07,
"loss": 0.7716,
"step": 830
},
{
"epoch": 0.04,
"grad_norm": 22.41349983215332,
"learning_rate": 9.256565656565656e-07,
"loss": 0.4673,
"step": 840
},
{
"epoch": 0.04,
"grad_norm": 24.813737869262695,
"learning_rate": 9.246464646464645e-07,
"loss": 0.7137,
"step": 850
},
{
"epoch": 0.04,
"grad_norm": 31.02164649963379,
"learning_rate": 9.236363636363636e-07,
"loss": 1.0195,
"step": 860
},
{
"epoch": 0.04,
"grad_norm": 19.22023582458496,
"learning_rate": 9.226262626262625e-07,
"loss": 0.8201,
"step": 870
},
{
"epoch": 0.04,
"grad_norm": 21.087970733642578,
"learning_rate": 9.216161616161616e-07,
"loss": 0.6296,
"step": 880
},
{
"epoch": 0.04,
"grad_norm": 8.970023155212402,
"learning_rate": 9.206060606060606e-07,
"loss": 0.3712,
"step": 890
},
{
"epoch": 0.04,
"grad_norm": 28.498889923095703,
"learning_rate": 9.195959595959596e-07,
"loss": 0.7953,
"step": 900
},
{
"epoch": 0.05,
"grad_norm": 21.259479522705078,
"learning_rate": 9.185858585858585e-07,
"loss": 0.7346,
"step": 910
},
{
"epoch": 0.05,
"grad_norm": 13.871064186096191,
"learning_rate": 9.175757575757576e-07,
"loss": 0.6644,
"step": 920
},
{
"epoch": 0.05,
"grad_norm": 44.91512680053711,
"learning_rate": 9.165656565656565e-07,
"loss": 0.5467,
"step": 930
},
{
"epoch": 0.05,
"grad_norm": 11.775423049926758,
"learning_rate": 9.155555555555556e-07,
"loss": 0.5149,
"step": 940
},
{
"epoch": 0.05,
"grad_norm": 35.62663650512695,
"learning_rate": 9.145454545454545e-07,
"loss": 0.6002,
"step": 950
},
{
"epoch": 0.05,
"grad_norm": 32.12046813964844,
"learning_rate": 9.135353535353535e-07,
"loss": 0.8875,
"step": 960
},
{
"epoch": 0.05,
"grad_norm": 37.11963653564453,
"learning_rate": 9.126262626262626e-07,
"loss": 0.9515,
"step": 970
},
{
"epoch": 0.05,
"grad_norm": 21.641096115112305,
"learning_rate": 9.116161616161616e-07,
"loss": 0.7849,
"step": 980
},
{
"epoch": 0.05,
"grad_norm": 29.253921508789062,
"learning_rate": 9.106060606060606e-07,
"loss": 0.5926,
"step": 990
},
{
"epoch": 0.05,
"grad_norm": 27.01972198486328,
"learning_rate": 9.095959595959596e-07,
"loss": 0.7061,
"step": 1000
},
{
"epoch": 0.05,
"eval_loss": 0.725902259349823,
"eval_runtime": 274.8847,
"eval_samples_per_second": 3.638,
"eval_steps_per_second": 3.638,
"step": 1000
},
{
"epoch": 0.05,
"grad_norm": 16.84354019165039,
"learning_rate": 9.085858585858586e-07,
"loss": 0.6186,
"step": 1010
},
{
"epoch": 0.05,
"grad_norm": 36.384700775146484,
"learning_rate": 9.075757575757576e-07,
"loss": 0.8362,
"step": 1020
},
{
"epoch": 0.05,
"grad_norm": 37.640892028808594,
"learning_rate": 9.065656565656565e-07,
"loss": 0.7901,
"step": 1030
},
{
"epoch": 0.05,
"grad_norm": 44.735076904296875,
"learning_rate": 9.055555555555556e-07,
"loss": 0.5116,
"step": 1040
},
{
"epoch": 0.05,
"grad_norm": 46.5770263671875,
"learning_rate": 9.045454545454545e-07,
"loss": 0.7274,
"step": 1050
},
{
"epoch": 0.05,
"grad_norm": 0.10406364500522614,
"learning_rate": 9.035353535353534e-07,
"loss": 0.4903,
"step": 1060
},
{
"epoch": 0.05,
"grad_norm": 18.263145446777344,
"learning_rate": 9.025252525252525e-07,
"loss": 0.7371,
"step": 1070
},
{
"epoch": 0.05,
"grad_norm": 33.42209243774414,
"learning_rate": 9.015151515151514e-07,
"loss": 0.8111,
"step": 1080
},
{
"epoch": 0.05,
"grad_norm": 48.237525939941406,
"learning_rate": 9.005050505050504e-07,
"loss": 0.6233,
"step": 1090
},
{
"epoch": 0.06,
"grad_norm": 1.5421327352523804,
"learning_rate": 8.994949494949494e-07,
"loss": 0.6609,
"step": 1100
},
{
"epoch": 0.06,
"grad_norm": 29.761442184448242,
"learning_rate": 8.984848484848484e-07,
"loss": 0.4407,
"step": 1110
},
{
"epoch": 0.06,
"grad_norm": 31.435007095336914,
"learning_rate": 8.974747474747474e-07,
"loss": 0.7207,
"step": 1120
},
{
"epoch": 0.06,
"grad_norm": 28.20315170288086,
"learning_rate": 8.964646464646465e-07,
"loss": 0.6879,
"step": 1130
},
{
"epoch": 0.06,
"grad_norm": 19.431041717529297,
"learning_rate": 8.954545454545454e-07,
"loss": 0.7292,
"step": 1140
},
{
"epoch": 0.06,
"grad_norm": 40.8221435546875,
"learning_rate": 8.944444444444445e-07,
"loss": 0.9112,
"step": 1150
},
{
"epoch": 0.06,
"grad_norm": 4.42428731918335,
"learning_rate": 8.934343434343434e-07,
"loss": 0.484,
"step": 1160
},
{
"epoch": 0.06,
"grad_norm": 24.146841049194336,
"learning_rate": 8.924242424242425e-07,
"loss": 0.4881,
"step": 1170
},
{
"epoch": 0.06,
"grad_norm": 52.02534866333008,
"learning_rate": 8.914141414141414e-07,
"loss": 0.8042,
"step": 1180
},
{
"epoch": 0.06,
"grad_norm": 26.81230926513672,
"learning_rate": 8.904040404040404e-07,
"loss": 0.6995,
"step": 1190
},
{
"epoch": 0.06,
"grad_norm": 9.775701522827148,
"learning_rate": 8.893939393939394e-07,
"loss": 0.8203,
"step": 1200
},
{
"epoch": 0.06,
"grad_norm": 20.65981674194336,
"learning_rate": 8.883838383838383e-07,
"loss": 0.9154,
"step": 1210
},
{
"epoch": 0.06,
"grad_norm": 20.086986541748047,
"learning_rate": 8.873737373737373e-07,
"loss": 0.4731,
"step": 1220
},
{
"epoch": 0.06,
"grad_norm": 20.6555233001709,
"learning_rate": 8.863636363636363e-07,
"loss": 0.7798,
"step": 1230
},
{
"epoch": 0.06,
"grad_norm": 13.60240364074707,
"learning_rate": 8.853535353535353e-07,
"loss": 0.6382,
"step": 1240
},
{
"epoch": 0.06,
"grad_norm": 20.39061737060547,
"learning_rate": 8.843434343434343e-07,
"loss": 0.6077,
"step": 1250
},
{
"epoch": 0.06,
"grad_norm": 14.251206398010254,
"learning_rate": 8.833333333333333e-07,
"loss": 0.5736,
"step": 1260
},
{
"epoch": 0.06,
"grad_norm": 63.768611907958984,
"learning_rate": 8.823232323232322e-07,
"loss": 0.7059,
"step": 1270
},
{
"epoch": 0.06,
"grad_norm": 44.81045150756836,
"learning_rate": 8.813131313131313e-07,
"loss": 0.6352,
"step": 1280
},
{
"epoch": 0.06,
"grad_norm": 15.952017784118652,
"learning_rate": 8.803030303030302e-07,
"loss": 0.7523,
"step": 1290
},
{
"epoch": 0.07,
"grad_norm": 27.234148025512695,
"learning_rate": 8.792929292929293e-07,
"loss": 0.4924,
"step": 1300
},
{
"epoch": 0.07,
"grad_norm": 44.29439163208008,
"learning_rate": 8.782828282828283e-07,
"loss": 0.6422,
"step": 1310
},
{
"epoch": 0.07,
"grad_norm": 35.106658935546875,
"learning_rate": 8.772727272727273e-07,
"loss": 0.655,
"step": 1320
},
{
"epoch": 0.07,
"grad_norm": 2.1618399620056152,
"learning_rate": 8.762626262626263e-07,
"loss": 0.6463,
"step": 1330
},
{
"epoch": 0.07,
"grad_norm": 36.30659866333008,
"learning_rate": 8.752525252525253e-07,
"loss": 0.5618,
"step": 1340
},
{
"epoch": 0.07,
"grad_norm": 18.705991744995117,
"learning_rate": 8.742424242424242e-07,
"loss": 0.5887,
"step": 1350
},
{
"epoch": 0.07,
"grad_norm": 2.828080654144287,
"learning_rate": 8.732323232323232e-07,
"loss": 0.6611,
"step": 1360
},
{
"epoch": 0.07,
"grad_norm": 47.979820251464844,
"learning_rate": 8.722222222222222e-07,
"loss": 0.4975,
"step": 1370
},
{
"epoch": 0.07,
"grad_norm": 25.263946533203125,
"learning_rate": 8.712121212121211e-07,
"loss": 0.761,
"step": 1380
},
{
"epoch": 0.07,
"grad_norm": 0.21978527307510376,
"learning_rate": 8.702020202020202e-07,
"loss": 0.5013,
"step": 1390
},
{
"epoch": 0.07,
"grad_norm": 15.600090026855469,
"learning_rate": 8.691919191919191e-07,
"loss": 0.5375,
"step": 1400
},
{
"epoch": 0.07,
"grad_norm": 25.815698623657227,
"learning_rate": 8.681818181818182e-07,
"loss": 0.8176,
"step": 1410
},
{
"epoch": 0.07,
"grad_norm": 44.83120346069336,
"learning_rate": 8.671717171717171e-07,
"loss": 0.5207,
"step": 1420
},
{
"epoch": 0.07,
"grad_norm": 20.984037399291992,
"learning_rate": 8.661616161616161e-07,
"loss": 0.5024,
"step": 1430
},
{
"epoch": 0.07,
"grad_norm": 26.290699005126953,
"learning_rate": 8.651515151515151e-07,
"loss": 0.7017,
"step": 1440
},
{
"epoch": 0.07,
"grad_norm": 23.3577880859375,
"learning_rate": 8.641414141414141e-07,
"loss": 0.6419,
"step": 1450
},
{
"epoch": 0.07,
"grad_norm": 21.74049949645996,
"learning_rate": 8.63131313131313e-07,
"loss": 0.5426,
"step": 1460
},
{
"epoch": 0.07,
"grad_norm": 0.5753927230834961,
"learning_rate": 8.62121212121212e-07,
"loss": 0.6417,
"step": 1470
},
{
"epoch": 0.07,
"grad_norm": 36.96406936645508,
"learning_rate": 8.611111111111111e-07,
"loss": 0.6999,
"step": 1480
},
{
"epoch": 0.07,
"grad_norm": 0.05758470296859741,
"learning_rate": 8.601010101010102e-07,
"loss": 0.816,
"step": 1490
},
{
"epoch": 0.07,
"grad_norm": 51.20552062988281,
"learning_rate": 8.590909090909091e-07,
"loss": 0.7171,
"step": 1500
},
{
"epoch": 0.08,
"grad_norm": 34.43236541748047,
"learning_rate": 8.58080808080808e-07,
"loss": 0.7448,
"step": 1510
},
{
"epoch": 0.08,
"grad_norm": 89.9273681640625,
"learning_rate": 8.570707070707071e-07,
"loss": 0.6324,
"step": 1520
},
{
"epoch": 0.08,
"grad_norm": 9.697078704833984,
"learning_rate": 8.56060606060606e-07,
"loss": 0.7951,
"step": 1530
},
{
"epoch": 0.08,
"grad_norm": 28.903240203857422,
"learning_rate": 8.55050505050505e-07,
"loss": 0.6224,
"step": 1540
},
{
"epoch": 0.08,
"grad_norm": 19.746826171875,
"learning_rate": 8.54040404040404e-07,
"loss": 0.4143,
"step": 1550
},
{
"epoch": 0.08,
"grad_norm": 33.706146240234375,
"learning_rate": 8.53030303030303e-07,
"loss": 0.6803,
"step": 1560
},
{
"epoch": 0.08,
"grad_norm": 24.144351959228516,
"learning_rate": 8.52020202020202e-07,
"loss": 0.6015,
"step": 1570
},
{
"epoch": 0.08,
"grad_norm": 34.93477249145508,
"learning_rate": 8.51010101010101e-07,
"loss": 0.5599,
"step": 1580
},
{
"epoch": 0.08,
"grad_norm": 31.863859176635742,
"learning_rate": 8.499999999999999e-07,
"loss": 0.7127,
"step": 1590
},
{
"epoch": 0.08,
"grad_norm": 48.534549713134766,
"learning_rate": 8.48989898989899e-07,
"loss": 0.7777,
"step": 1600
},
{
"epoch": 0.08,
"grad_norm": 35.08165740966797,
"learning_rate": 8.479797979797979e-07,
"loss": 0.6571,
"step": 1610
},
{
"epoch": 0.08,
"grad_norm": 33.532325744628906,
"learning_rate": 8.469696969696968e-07,
"loss": 0.7386,
"step": 1620
},
{
"epoch": 0.08,
"grad_norm": 0.014142200350761414,
"learning_rate": 8.459595959595959e-07,
"loss": 0.5505,
"step": 1630
},
{
"epoch": 0.08,
"grad_norm": 29.99919319152832,
"learning_rate": 8.449494949494948e-07,
"loss": 0.4274,
"step": 1640
},
{
"epoch": 0.08,
"grad_norm": 13.206497192382812,
"learning_rate": 8.43939393939394e-07,
"loss": 0.4399,
"step": 1650
},
{
"epoch": 0.08,
"grad_norm": 17.408052444458008,
"learning_rate": 8.429292929292929e-07,
"loss": 0.5262,
"step": 1660
},
{
"epoch": 0.08,
"grad_norm": 14.05802059173584,
"learning_rate": 8.419191919191919e-07,
"loss": 0.5503,
"step": 1670
},
{
"epoch": 0.08,
"grad_norm": 13.401651382446289,
"learning_rate": 8.409090909090909e-07,
"loss": 0.813,
"step": 1680
},
{
"epoch": 0.08,
"grad_norm": 28.40484619140625,
"learning_rate": 8.398989898989899e-07,
"loss": 0.7321,
"step": 1690
},
{
"epoch": 0.09,
"grad_norm": 39.133670806884766,
"learning_rate": 8.388888888888888e-07,
"loss": 0.7129,
"step": 1700
},
{
"epoch": 0.09,
"grad_norm": 26.99220848083496,
"learning_rate": 8.378787878787879e-07,
"loss": 0.645,
"step": 1710
},
{
"epoch": 0.09,
"grad_norm": 22.875778198242188,
"learning_rate": 8.368686868686868e-07,
"loss": 0.6779,
"step": 1720
},
{
"epoch": 0.09,
"grad_norm": 37.771705627441406,
"learning_rate": 8.358585858585859e-07,
"loss": 0.6104,
"step": 1730
},
{
"epoch": 0.09,
"grad_norm": 17.48222541809082,
"learning_rate": 8.348484848484848e-07,
"loss": 0.5982,
"step": 1740
},
{
"epoch": 0.09,
"grad_norm": 13.396077156066895,
"learning_rate": 8.338383838383838e-07,
"loss": 0.6295,
"step": 1750
},
{
"epoch": 0.09,
"grad_norm": 48.97739791870117,
"learning_rate": 8.328282828282828e-07,
"loss": 0.8439,
"step": 1760
},
{
"epoch": 0.09,
"grad_norm": 74.33853149414062,
"learning_rate": 8.318181818181817e-07,
"loss": 1.0566,
"step": 1770
},
{
"epoch": 0.09,
"grad_norm": 7.2910051345825195,
"learning_rate": 8.308080808080807e-07,
"loss": 0.5226,
"step": 1780
},
{
"epoch": 0.09,
"grad_norm": 27.389278411865234,
"learning_rate": 8.297979797979797e-07,
"loss": 1.1226,
"step": 1790
},
{
"epoch": 0.09,
"grad_norm": 9.284111976623535,
"learning_rate": 8.287878787878787e-07,
"loss": 0.4101,
"step": 1800
},
{
"epoch": 0.09,
"grad_norm": 8.962563514709473,
"learning_rate": 8.277777777777777e-07,
"loss": 0.5349,
"step": 1810
},
{
"epoch": 0.09,
"grad_norm": 47.56216049194336,
"learning_rate": 8.267676767676768e-07,
"loss": 0.6863,
"step": 1820
},
{
"epoch": 0.09,
"grad_norm": 31.547109603881836,
"learning_rate": 8.257575757575757e-07,
"loss": 0.7484,
"step": 1830
},
{
"epoch": 0.09,
"grad_norm": 8.890266418457031,
"learning_rate": 8.247474747474748e-07,
"loss": 0.7518,
"step": 1840
},
{
"epoch": 0.09,
"grad_norm": 25.44080352783203,
"learning_rate": 8.237373737373737e-07,
"loss": 0.8552,
"step": 1850
},
{
"epoch": 0.09,
"grad_norm": 19.411556243896484,
"learning_rate": 8.227272727272727e-07,
"loss": 0.6285,
"step": 1860
},
{
"epoch": 0.09,
"grad_norm": 5.5483574867248535,
"learning_rate": 8.217171717171717e-07,
"loss": 0.5478,
"step": 1870
},
{
"epoch": 0.09,
"grad_norm": 22.145336151123047,
"learning_rate": 8.207070707070707e-07,
"loss": 0.7499,
"step": 1880
},
{
"epoch": 0.09,
"grad_norm": 30.043731689453125,
"learning_rate": 8.196969696969697e-07,
"loss": 0.7252,
"step": 1890
},
{
"epoch": 0.1,
"grad_norm": 39.9935417175293,
"learning_rate": 8.186868686868687e-07,
"loss": 0.7348,
"step": 1900
},
{
"epoch": 0.1,
"grad_norm": 5.8807525634765625,
"learning_rate": 8.176767676767676e-07,
"loss": 0.381,
"step": 1910
},
{
"epoch": 0.1,
"grad_norm": 16.082597732543945,
"learning_rate": 8.166666666666666e-07,
"loss": 0.6413,
"step": 1920
},
{
"epoch": 0.1,
"grad_norm": 15.713370323181152,
"learning_rate": 8.156565656565656e-07,
"loss": 0.4765,
"step": 1930
},
{
"epoch": 0.1,
"grad_norm": 43.45647048950195,
"learning_rate": 8.146464646464645e-07,
"loss": 0.6653,
"step": 1940
},
{
"epoch": 0.1,
"grad_norm": 51.137046813964844,
"learning_rate": 8.136363636363636e-07,
"loss": 0.6915,
"step": 1950
},
{
"epoch": 0.1,
"grad_norm": 33.2936897277832,
"learning_rate": 8.126262626262625e-07,
"loss": 0.4993,
"step": 1960
},
{
"epoch": 0.1,
"grad_norm": 10.87304973602295,
"learning_rate": 8.116161616161616e-07,
"loss": 0.7893,
"step": 1970
},
{
"epoch": 0.1,
"grad_norm": 0.774446427822113,
"learning_rate": 8.106060606060605e-07,
"loss": 0.5946,
"step": 1980
},
{
"epoch": 0.1,
"grad_norm": 26.221094131469727,
"learning_rate": 8.095959595959596e-07,
"loss": 0.8534,
"step": 1990
},
{
"epoch": 0.1,
"grad_norm": 46.51724624633789,
"learning_rate": 8.085858585858586e-07,
"loss": 0.4862,
"step": 2000
},
{
"epoch": 0.1,
"eval_loss": 0.6236868500709534,
"eval_runtime": 274.2428,
"eval_samples_per_second": 3.646,
"eval_steps_per_second": 3.646,
"step": 2000
},
{
"epoch": 0.1,
"grad_norm": 15.500929832458496,
"learning_rate": 8.075757575757576e-07,
"loss": 0.6197,
"step": 2010
},
{
"epoch": 0.1,
"grad_norm": 7.338476657867432,
"learning_rate": 8.065656565656565e-07,
"loss": 0.591,
"step": 2020
},
{
"epoch": 0.1,
"grad_norm": 28.497867584228516,
"learning_rate": 8.055555555555556e-07,
"loss": 0.5593,
"step": 2030
},
{
"epoch": 0.1,
"grad_norm": 15.371612548828125,
"learning_rate": 8.045454545454545e-07,
"loss": 0.53,
"step": 2040
},
{
"epoch": 0.1,
"grad_norm": 38.33600616455078,
"learning_rate": 8.035353535353536e-07,
"loss": 1.0264,
"step": 2050
},
{
"epoch": 0.1,
"grad_norm": 17.20992088317871,
"learning_rate": 8.025252525252525e-07,
"loss": 0.4638,
"step": 2060
},
{
"epoch": 0.1,
"grad_norm": 58.250579833984375,
"learning_rate": 8.015151515151514e-07,
"loss": 0.5415,
"step": 2070
},
{
"epoch": 0.1,
"grad_norm": 20.2476806640625,
"learning_rate": 8.005050505050505e-07,
"loss": 0.5162,
"step": 2080
},
{
"epoch": 0.1,
"grad_norm": 27.523902893066406,
"learning_rate": 7.994949494949494e-07,
"loss": 0.4223,
"step": 2090
},
{
"epoch": 0.1,
"grad_norm": 25.663818359375,
"learning_rate": 7.984848484848484e-07,
"loss": 0.6016,
"step": 2100
},
{
"epoch": 0.11,
"grad_norm": 25.809614181518555,
"learning_rate": 7.974747474747474e-07,
"loss": 1.0505,
"step": 2110
},
{
"epoch": 0.11,
"grad_norm": 52.98808670043945,
"learning_rate": 7.964646464646464e-07,
"loss": 1.1373,
"step": 2120
},
{
"epoch": 0.11,
"grad_norm": 15.879914283752441,
"learning_rate": 7.954545454545454e-07,
"loss": 0.5527,
"step": 2130
},
{
"epoch": 0.11,
"grad_norm": 16.313142776489258,
"learning_rate": 7.944444444444444e-07,
"loss": 0.4782,
"step": 2140
},
{
"epoch": 0.11,
"grad_norm": 39.388511657714844,
"learning_rate": 7.934343434343433e-07,
"loss": 0.9302,
"step": 2150
},
{
"epoch": 0.11,
"grad_norm": 27.657390594482422,
"learning_rate": 7.924242424242425e-07,
"loss": 0.4342,
"step": 2160
},
{
"epoch": 0.11,
"grad_norm": 3.85066819190979,
"learning_rate": 7.914141414141414e-07,
"loss": 0.7398,
"step": 2170
},
{
"epoch": 0.11,
"grad_norm": 20.403371810913086,
"learning_rate": 7.904040404040404e-07,
"loss": 0.7303,
"step": 2180
},
{
"epoch": 0.11,
"grad_norm": 19.42263412475586,
"learning_rate": 7.893939393939394e-07,
"loss": 0.7445,
"step": 2190
},
{
"epoch": 0.11,
"grad_norm": 21.36237144470215,
"learning_rate": 7.883838383838383e-07,
"loss": 0.6936,
"step": 2200
},
{
"epoch": 0.11,
"grad_norm": 27.883874893188477,
"learning_rate": 7.873737373737374e-07,
"loss": 0.6665,
"step": 2210
},
{
"epoch": 0.11,
"grad_norm": 16.943950653076172,
"learning_rate": 7.863636363636363e-07,
"loss": 0.6188,
"step": 2220
},
{
"epoch": 0.11,
"grad_norm": 117.27821350097656,
"learning_rate": 7.853535353535353e-07,
"loss": 0.4547,
"step": 2230
},
{
"epoch": 0.11,
"grad_norm": 16.697284698486328,
"learning_rate": 7.843434343434343e-07,
"loss": 0.3462,
"step": 2240
},
{
"epoch": 0.11,
"grad_norm": 9.313762664794922,
"learning_rate": 7.833333333333333e-07,
"loss": 0.5729,
"step": 2250
},
{
"epoch": 0.11,
"grad_norm": 43.60643005371094,
"learning_rate": 7.823232323232322e-07,
"loss": 0.8304,
"step": 2260
},
{
"epoch": 0.11,
"grad_norm": 22.356792449951172,
"learning_rate": 7.813131313131313e-07,
"loss": 0.5437,
"step": 2270
},
{
"epoch": 0.11,
"grad_norm": 32.53278732299805,
"learning_rate": 7.803030303030302e-07,
"loss": 1.0315,
"step": 2280
},
{
"epoch": 0.11,
"grad_norm": 46.504573822021484,
"learning_rate": 7.792929292929293e-07,
"loss": 0.6465,
"step": 2290
},
{
"epoch": 0.12,
"grad_norm": 7.453057765960693,
"learning_rate": 7.782828282828282e-07,
"loss": 0.6023,
"step": 2300
},
{
"epoch": 0.12,
"grad_norm": 73.96940612792969,
"learning_rate": 7.772727272727272e-07,
"loss": 0.7558,
"step": 2310
},
{
"epoch": 0.12,
"grad_norm": 31.267314910888672,
"learning_rate": 7.762626262626262e-07,
"loss": 0.527,
"step": 2320
},
{
"epoch": 0.12,
"grad_norm": 41.19007873535156,
"learning_rate": 7.752525252525253e-07,
"loss": 0.7043,
"step": 2330
},
{
"epoch": 0.12,
"grad_norm": 21.09952735900879,
"learning_rate": 7.742424242424243e-07,
"loss": 0.9584,
"step": 2340
},
{
"epoch": 0.12,
"grad_norm": 42.14522933959961,
"learning_rate": 7.732323232323232e-07,
"loss": 0.5388,
"step": 2350
},
{
"epoch": 0.12,
"grad_norm": 13.435091972351074,
"learning_rate": 7.722222222222222e-07,
"loss": 0.6348,
"step": 2360
},
{
"epoch": 0.12,
"grad_norm": 26.72304916381836,
"learning_rate": 7.712121212121212e-07,
"loss": 0.3811,
"step": 2370
},
{
"epoch": 0.12,
"grad_norm": 43.16832733154297,
"learning_rate": 7.702020202020202e-07,
"loss": 0.7426,
"step": 2380
},
{
"epoch": 0.12,
"grad_norm": 3.5628132820129395,
"learning_rate": 7.691919191919191e-07,
"loss": 0.4845,
"step": 2390
},
{
"epoch": 0.12,
"grad_norm": 52.14829635620117,
"learning_rate": 7.681818181818182e-07,
"loss": 0.6397,
"step": 2400
},
{
"epoch": 0.12,
"grad_norm": 33.835205078125,
"learning_rate": 7.671717171717171e-07,
"loss": 0.6927,
"step": 2410
},
{
"epoch": 0.12,
"grad_norm": 33.98677444458008,
"learning_rate": 7.661616161616161e-07,
"loss": 0.6525,
"step": 2420
},
{
"epoch": 0.12,
"grad_norm": 23.454181671142578,
"learning_rate": 7.651515151515151e-07,
"loss": 0.7971,
"step": 2430
},
{
"epoch": 0.12,
"grad_norm": 39.06905746459961,
"learning_rate": 7.641414141414141e-07,
"loss": 0.516,
"step": 2440
},
{
"epoch": 0.12,
"grad_norm": 36.33759307861328,
"learning_rate": 7.631313131313131e-07,
"loss": 0.6733,
"step": 2450
},
{
"epoch": 0.12,
"grad_norm": 39.87421417236328,
"learning_rate": 7.621212121212121e-07,
"loss": 0.827,
"step": 2460
},
{
"epoch": 0.12,
"grad_norm": 27.0250244140625,
"learning_rate": 7.61111111111111e-07,
"loss": 0.7167,
"step": 2470
},
{
"epoch": 0.12,
"grad_norm": 42.34687042236328,
"learning_rate": 7.6010101010101e-07,
"loss": 0.8127,
"step": 2480
},
{
"epoch": 0.12,
"grad_norm": 20.025238037109375,
"learning_rate": 7.59090909090909e-07,
"loss": 0.5936,
"step": 2490
},
{
"epoch": 0.12,
"grad_norm": 2.185176372528076,
"learning_rate": 7.580808080808081e-07,
"loss": 0.4552,
"step": 2500
},
{
"epoch": 0.13,
"grad_norm": 10.323554039001465,
"learning_rate": 7.570707070707071e-07,
"loss": 0.6821,
"step": 2510
},
{
"epoch": 0.13,
"grad_norm": 59.452205657958984,
"learning_rate": 7.56060606060606e-07,
"loss": 0.8375,
"step": 2520
},
{
"epoch": 0.13,
"grad_norm": 0.1424858570098877,
"learning_rate": 7.550505050505051e-07,
"loss": 0.3893,
"step": 2530
},
{
"epoch": 0.13,
"grad_norm": 19.907209396362305,
"learning_rate": 7.54040404040404e-07,
"loss": 0.6082,
"step": 2540
},
{
"epoch": 0.13,
"grad_norm": 3.8457748889923096,
"learning_rate": 7.53030303030303e-07,
"loss": 0.7132,
"step": 2550
},
{
"epoch": 0.13,
"grad_norm": 12.082324028015137,
"learning_rate": 7.52020202020202e-07,
"loss": 0.7951,
"step": 2560
},
{
"epoch": 0.13,
"grad_norm": 24.198322296142578,
"learning_rate": 7.51010101010101e-07,
"loss": 0.5836,
"step": 2570
},
{
"epoch": 0.13,
"grad_norm": 25.63511085510254,
"learning_rate": 7.5e-07,
"loss": 0.6352,
"step": 2580
},
{
"epoch": 0.13,
"grad_norm": 24.314882278442383,
"learning_rate": 7.48989898989899e-07,
"loss": 0.7092,
"step": 2590
},
{
"epoch": 0.13,
"grad_norm": 10.859787940979004,
"learning_rate": 7.479797979797979e-07,
"loss": 0.5931,
"step": 2600
},
{
"epoch": 0.13,
"grad_norm": 13.863005638122559,
"learning_rate": 7.46969696969697e-07,
"loss": 0.5174,
"step": 2610
},
{
"epoch": 0.13,
"grad_norm": 22.458776473999023,
"learning_rate": 7.459595959595959e-07,
"loss": 0.4968,
"step": 2620
},
{
"epoch": 0.13,
"grad_norm": 49.36175537109375,
"learning_rate": 7.449494949494948e-07,
"loss": 0.6513,
"step": 2630
},
{
"epoch": 0.13,
"grad_norm": 7.616428375244141,
"learning_rate": 7.439393939393939e-07,
"loss": 0.4914,
"step": 2640
},
{
"epoch": 0.13,
"grad_norm": 4.416635036468506,
"learning_rate": 7.429292929292928e-07,
"loss": 0.2537,
"step": 2650
},
{
"epoch": 0.13,
"grad_norm": 16.390323638916016,
"learning_rate": 7.419191919191918e-07,
"loss": 0.7159,
"step": 2660
},
{
"epoch": 0.13,
"grad_norm": 5.848669052124023,
"learning_rate": 7.409090909090909e-07,
"loss": 0.4026,
"step": 2670
},
{
"epoch": 0.13,
"grad_norm": 26.498138427734375,
"learning_rate": 7.398989898989899e-07,
"loss": 0.5316,
"step": 2680
},
{
"epoch": 0.13,
"grad_norm": 20.28061866760254,
"learning_rate": 7.388888888888889e-07,
"loss": 0.8416,
"step": 2690
},
{
"epoch": 0.14,
"grad_norm": 29.66952133178711,
"learning_rate": 7.378787878787879e-07,
"loss": 0.6887,
"step": 2700
},
{
"epoch": 0.14,
"grad_norm": 18.624435424804688,
"learning_rate": 7.368686868686868e-07,
"loss": 0.4513,
"step": 2710
},
{
"epoch": 0.14,
"grad_norm": 22.85711669921875,
"learning_rate": 7.358585858585859e-07,
"loss": 0.894,
"step": 2720
},
{
"epoch": 0.14,
"grad_norm": 30.069570541381836,
"learning_rate": 7.348484848484848e-07,
"loss": 0.8273,
"step": 2730
},
{
"epoch": 0.14,
"grad_norm": 52.604408264160156,
"learning_rate": 7.338383838383839e-07,
"loss": 0.6306,
"step": 2740
},
{
"epoch": 0.14,
"grad_norm": 30.587234497070312,
"learning_rate": 7.328282828282828e-07,
"loss": 0.7653,
"step": 2750
},
{
"epoch": 0.14,
"grad_norm": 25.937252044677734,
"learning_rate": 7.318181818181818e-07,
"loss": 0.7205,
"step": 2760
},
{
"epoch": 0.14,
"grad_norm": 18.365734100341797,
"learning_rate": 7.308080808080808e-07,
"loss": 0.4825,
"step": 2770
},
{
"epoch": 0.14,
"grad_norm": 17.263690948486328,
"learning_rate": 7.297979797979797e-07,
"loss": 0.7032,
"step": 2780
},
{
"epoch": 0.14,
"grad_norm": 34.739078521728516,
"learning_rate": 7.287878787878787e-07,
"loss": 0.463,
"step": 2790
},
{
"epoch": 0.14,
"grad_norm": 10.641708374023438,
"learning_rate": 7.277777777777777e-07,
"loss": 0.4351,
"step": 2800
},
{
"epoch": 0.14,
"grad_norm": 75.48184967041016,
"learning_rate": 7.267676767676767e-07,
"loss": 0.7079,
"step": 2810
},
{
"epoch": 0.14,
"grad_norm": 45.05419921875,
"learning_rate": 7.257575757575756e-07,
"loss": 0.5934,
"step": 2820
},
{
"epoch": 0.14,
"grad_norm": 29.237016677856445,
"learning_rate": 7.247474747474747e-07,
"loss": 0.4162,
"step": 2830
},
{
"epoch": 0.14,
"grad_norm": 0.004866959527134895,
"learning_rate": 7.237373737373737e-07,
"loss": 0.7245,
"step": 2840
},
{
"epoch": 0.14,
"grad_norm": 128.69998168945312,
"learning_rate": 7.227272727272728e-07,
"loss": 0.4543,
"step": 2850
},
{
"epoch": 0.14,
"grad_norm": 0.031988680362701416,
"learning_rate": 7.217171717171717e-07,
"loss": 0.6369,
"step": 2860
},
{
"epoch": 0.14,
"grad_norm": 23.29071807861328,
"learning_rate": 7.207070707070707e-07,
"loss": 0.8627,
"step": 2870
},
{
"epoch": 0.14,
"grad_norm": 16.787782669067383,
"learning_rate": 7.196969696969697e-07,
"loss": 0.7068,
"step": 2880
},
{
"epoch": 0.14,
"grad_norm": 27.934904098510742,
"learning_rate": 7.186868686868687e-07,
"loss": 0.6029,
"step": 2890
},
{
"epoch": 0.14,
"grad_norm": 28.06117057800293,
"learning_rate": 7.176767676767677e-07,
"loss": 0.4879,
"step": 2900
},
{
"epoch": 0.15,
"grad_norm": 13.214853286743164,
"learning_rate": 7.166666666666667e-07,
"loss": 0.4323,
"step": 2910
},
{
"epoch": 0.15,
"grad_norm": 4.088837146759033,
"learning_rate": 7.156565656565656e-07,
"loss": 0.5254,
"step": 2920
},
{
"epoch": 0.15,
"grad_norm": 29.167570114135742,
"learning_rate": 7.146464646464646e-07,
"loss": 0.7426,
"step": 2930
},
{
"epoch": 0.15,
"grad_norm": 55.35223388671875,
"learning_rate": 7.136363636363636e-07,
"loss": 0.5928,
"step": 2940
},
{
"epoch": 0.15,
"grad_norm": 46.83303451538086,
"learning_rate": 7.126262626262625e-07,
"loss": 0.5281,
"step": 2950
},
{
"epoch": 0.15,
"grad_norm": 62.05622482299805,
"learning_rate": 7.116161616161616e-07,
"loss": 0.6006,
"step": 2960
},
{
"epoch": 0.15,
"grad_norm": 47.03242874145508,
"learning_rate": 7.106060606060605e-07,
"loss": 0.5264,
"step": 2970
},
{
"epoch": 0.15,
"grad_norm": 41.88718032836914,
"learning_rate": 7.095959595959596e-07,
"loss": 0.5463,
"step": 2980
},
{
"epoch": 0.15,
"grad_norm": 10.629190444946289,
"learning_rate": 7.085858585858585e-07,
"loss": 0.4873,
"step": 2990
},
{
"epoch": 0.15,
"grad_norm": 34.29298400878906,
"learning_rate": 7.075757575757575e-07,
"loss": 0.5327,
"step": 3000
},
{
"epoch": 0.15,
"eval_loss": 0.6308945417404175,
"eval_runtime": 273.2852,
"eval_samples_per_second": 3.659,
"eval_steps_per_second": 3.659,
"step": 3000
},
{
"epoch": 0.15,
"grad_norm": 57.80482482910156,
"learning_rate": 7.065656565656566e-07,
"loss": 0.5131,
"step": 3010
},
{
"epoch": 0.15,
"grad_norm": 15.977564811706543,
"learning_rate": 7.055555555555556e-07,
"loss": 0.7116,
"step": 3020
},
{
"epoch": 0.15,
"grad_norm": 24.813589096069336,
"learning_rate": 7.045454545454545e-07,
"loss": 0.8132,
"step": 3030
},
{
"epoch": 0.15,
"grad_norm": 22.984556198120117,
"learning_rate": 7.035353535353536e-07,
"loss": 0.9015,
"step": 3040
},
{
"epoch": 0.15,
"grad_norm": 15.78228759765625,
"learning_rate": 7.025252525252525e-07,
"loss": 0.5522,
"step": 3050
},
{
"epoch": 0.15,
"grad_norm": 19.144140243530273,
"learning_rate": 7.015151515151516e-07,
"loss": 0.5387,
"step": 3060
},
{
"epoch": 0.15,
"grad_norm": 47.44633483886719,
"learning_rate": 7.005050505050505e-07,
"loss": 0.5754,
"step": 3070
},
{
"epoch": 0.15,
"grad_norm": 36.51036834716797,
"learning_rate": 6.994949494949494e-07,
"loss": 0.8067,
"step": 3080
},
{
"epoch": 0.15,
"grad_norm": 0.8666948080062866,
"learning_rate": 6.984848484848485e-07,
"loss": 0.6058,
"step": 3090
},
{
"epoch": 0.15,
"grad_norm": 23.022371292114258,
"learning_rate": 6.974747474747474e-07,
"loss": 0.6043,
"step": 3100
},
{
"epoch": 0.16,
"grad_norm": 42.4169807434082,
"learning_rate": 6.964646464646464e-07,
"loss": 0.5526,
"step": 3110
},
{
"epoch": 0.16,
"grad_norm": 65.37395477294922,
"learning_rate": 6.954545454545454e-07,
"loss": 0.5219,
"step": 3120
},
{
"epoch": 0.16,
"grad_norm": 18.157527923583984,
"learning_rate": 6.944444444444444e-07,
"loss": 0.6028,
"step": 3130
},
{
"epoch": 0.16,
"grad_norm": 52.82014846801758,
"learning_rate": 6.934343434343434e-07,
"loss": 0.7391,
"step": 3140
},
{
"epoch": 0.16,
"grad_norm": 19.03993034362793,
"learning_rate": 6.924242424242424e-07,
"loss": 0.9328,
"step": 3150
},
{
"epoch": 0.16,
"grad_norm": 38.07152557373047,
"learning_rate": 6.914141414141413e-07,
"loss": 0.677,
"step": 3160
},
{
"epoch": 0.16,
"grad_norm": 10.974771499633789,
"learning_rate": 6.904040404040404e-07,
"loss": 0.638,
"step": 3170
},
{
"epoch": 0.16,
"grad_norm": 33.91596221923828,
"learning_rate": 6.894949494949494e-07,
"loss": 0.9438,
"step": 3180
},
{
"epoch": 0.16,
"grad_norm": 48.037166595458984,
"learning_rate": 6.884848484848485e-07,
"loss": 0.4394,
"step": 3190
},
{
"epoch": 0.16,
"grad_norm": 28.13571548461914,
"learning_rate": 6.874747474747474e-07,
"loss": 0.4743,
"step": 3200
},
{
"epoch": 0.16,
"grad_norm": 35.86332702636719,
"learning_rate": 6.864646464646464e-07,
"loss": 0.5854,
"step": 3210
},
{
"epoch": 0.16,
"grad_norm": 28.57064437866211,
"learning_rate": 6.854545454545454e-07,
"loss": 0.9211,
"step": 3220
},
{
"epoch": 0.16,
"grad_norm": 41.90704345703125,
"learning_rate": 6.844444444444444e-07,
"loss": 0.6961,
"step": 3230
},
{
"epoch": 0.16,
"grad_norm": 11.663514137268066,
"learning_rate": 6.834343434343434e-07,
"loss": 0.6082,
"step": 3240
},
{
"epoch": 0.16,
"grad_norm": 0.3548143804073334,
"learning_rate": 6.824242424242424e-07,
"loss": 0.5743,
"step": 3250
},
{
"epoch": 0.16,
"grad_norm": 49.9235954284668,
"learning_rate": 6.814141414141413e-07,
"loss": 0.8361,
"step": 3260
},
{
"epoch": 0.16,
"grad_norm": 63.49104309082031,
"learning_rate": 6.804040404040405e-07,
"loss": 0.6426,
"step": 3270
},
{
"epoch": 0.16,
"grad_norm": 34.379417419433594,
"learning_rate": 6.793939393939394e-07,
"loss": 0.6631,
"step": 3280
},
{
"epoch": 0.16,
"grad_norm": 38.65358352661133,
"learning_rate": 6.783838383838383e-07,
"loss": 0.7235,
"step": 3290
},
{
"epoch": 0.17,
"grad_norm": 65.52446746826172,
"learning_rate": 6.773737373737374e-07,
"loss": 0.718,
"step": 3300
},
{
"epoch": 0.17,
"grad_norm": 11.010351181030273,
"learning_rate": 6.763636363636363e-07,
"loss": 0.7492,
"step": 3310
},
{
"epoch": 0.17,
"grad_norm": 0.8809446096420288,
"learning_rate": 6.753535353535354e-07,
"loss": 0.685,
"step": 3320
},
{
"epoch": 0.17,
"grad_norm": 18.255809783935547,
"learning_rate": 6.743434343434343e-07,
"loss": 0.5533,
"step": 3330
},
{
"epoch": 0.17,
"grad_norm": 26.685049057006836,
"learning_rate": 6.733333333333333e-07,
"loss": 0.5235,
"step": 3340
},
{
"epoch": 0.17,
"grad_norm": 249.94290161132812,
"learning_rate": 6.723232323232323e-07,
"loss": 0.6984,
"step": 3350
},
{
"epoch": 0.17,
"grad_norm": 45.482479095458984,
"learning_rate": 6.713131313131313e-07,
"loss": 0.4921,
"step": 3360
},
{
"epoch": 0.17,
"grad_norm": 19.996156692504883,
"learning_rate": 6.703030303030302e-07,
"loss": 0.4737,
"step": 3370
},
{
"epoch": 0.17,
"grad_norm": 155.62306213378906,
"learning_rate": 6.692929292929293e-07,
"loss": 0.6733,
"step": 3380
},
{
"epoch": 0.17,
"grad_norm": 35.31591033935547,
"learning_rate": 6.682828282828282e-07,
"loss": 0.5598,
"step": 3390
},
{
"epoch": 0.17,
"grad_norm": 32.30759048461914,
"learning_rate": 6.672727272727273e-07,
"loss": 0.7611,
"step": 3400
},
{
"epoch": 0.17,
"grad_norm": 30.439285278320312,
"learning_rate": 6.662626262626262e-07,
"loss": 0.8177,
"step": 3410
},
{
"epoch": 0.17,
"grad_norm": 39.084266662597656,
"learning_rate": 6.652525252525251e-07,
"loss": 0.683,
"step": 3420
},
{
"epoch": 0.17,
"grad_norm": 28.271728515625,
"learning_rate": 6.642424242424242e-07,
"loss": 0.5019,
"step": 3430
},
{
"epoch": 0.17,
"grad_norm": 0.6252301931381226,
"learning_rate": 6.632323232323232e-07,
"loss": 0.4204,
"step": 3440
},
{
"epoch": 0.17,
"grad_norm": 39.03291702270508,
"learning_rate": 6.622222222222222e-07,
"loss": 0.4508,
"step": 3450
},
{
"epoch": 0.17,
"grad_norm": 4.38857364654541,
"learning_rate": 6.612121212121212e-07,
"loss": 0.5938,
"step": 3460
},
{
"epoch": 0.17,
"grad_norm": 24.514738082885742,
"learning_rate": 6.602020202020202e-07,
"loss": 0.7128,
"step": 3470
},
{
"epoch": 0.17,
"grad_norm": 45.85287857055664,
"learning_rate": 6.591919191919192e-07,
"loss": 0.7125,
"step": 3480
},
{
"epoch": 0.17,
"grad_norm": 0.022573018446564674,
"learning_rate": 6.581818181818182e-07,
"loss": 0.4128,
"step": 3490
},
{
"epoch": 0.17,
"grad_norm": 21.102893829345703,
"learning_rate": 6.571717171717171e-07,
"loss": 0.5341,
"step": 3500
},
{
"epoch": 0.18,
"grad_norm": 27.609329223632812,
"learning_rate": 6.561616161616162e-07,
"loss": 0.4808,
"step": 3510
},
{
"epoch": 0.18,
"grad_norm": 28.081892013549805,
"learning_rate": 6.551515151515151e-07,
"loss": 0.4426,
"step": 3520
},
{
"epoch": 0.18,
"grad_norm": 35.72859573364258,
"learning_rate": 6.541414141414141e-07,
"loss": 0.5072,
"step": 3530
},
{
"epoch": 0.18,
"grad_norm": 31.80050277709961,
"learning_rate": 6.531313131313131e-07,
"loss": 0.5102,
"step": 3540
},
{
"epoch": 0.18,
"grad_norm": 3.7258970737457275,
"learning_rate": 6.52121212121212e-07,
"loss": 0.4568,
"step": 3550
},
{
"epoch": 0.18,
"grad_norm": 23.297748565673828,
"learning_rate": 6.511111111111111e-07,
"loss": 0.5721,
"step": 3560
},
{
"epoch": 0.18,
"grad_norm": 63.63188171386719,
"learning_rate": 6.5010101010101e-07,
"loss": 0.6361,
"step": 3570
},
{
"epoch": 0.18,
"grad_norm": 38.485416412353516,
"learning_rate": 6.49090909090909e-07,
"loss": 0.4438,
"step": 3580
},
{
"epoch": 0.18,
"grad_norm": 40.09241485595703,
"learning_rate": 6.48080808080808e-07,
"loss": 0.732,
"step": 3590
},
{
"epoch": 0.18,
"grad_norm": 49.900917053222656,
"learning_rate": 6.47070707070707e-07,
"loss": 0.6085,
"step": 3600
},
{
"epoch": 0.18,
"grad_norm": 26.581472396850586,
"learning_rate": 6.46060606060606e-07,
"loss": 0.7013,
"step": 3610
},
{
"epoch": 0.18,
"grad_norm": 9.770761489868164,
"learning_rate": 6.450505050505051e-07,
"loss": 0.4664,
"step": 3620
},
{
"epoch": 0.18,
"grad_norm": 31.70376205444336,
"learning_rate": 6.44040404040404e-07,
"loss": 0.6208,
"step": 3630
},
{
"epoch": 0.18,
"grad_norm": 37.74061965942383,
"learning_rate": 6.430303030303031e-07,
"loss": 0.6137,
"step": 3640
},
{
"epoch": 0.18,
"grad_norm": 33.57075119018555,
"learning_rate": 6.42020202020202e-07,
"loss": 0.591,
"step": 3650
},
{
"epoch": 0.18,
"grad_norm": 52.23899841308594,
"learning_rate": 6.41010101010101e-07,
"loss": 0.8013,
"step": 3660
},
{
"epoch": 0.18,
"grad_norm": 62.8396110534668,
"learning_rate": 6.4e-07,
"loss": 0.7005,
"step": 3670
},
{
"epoch": 0.18,
"grad_norm": 36.724098205566406,
"learning_rate": 6.38989898989899e-07,
"loss": 0.8353,
"step": 3680
},
{
"epoch": 0.18,
"grad_norm": 2.098162889480591,
"learning_rate": 6.379797979797979e-07,
"loss": 0.5307,
"step": 3690
},
{
"epoch": 0.18,
"grad_norm": 41.13184356689453,
"learning_rate": 6.36969696969697e-07,
"loss": 0.5198,
"step": 3700
},
{
"epoch": 0.19,
"grad_norm": 46.46861267089844,
"learning_rate": 6.359595959595959e-07,
"loss": 0.4319,
"step": 3710
},
{
"epoch": 0.19,
"grad_norm": 65.68028259277344,
"learning_rate": 6.349494949494949e-07,
"loss": 0.5386,
"step": 3720
},
{
"epoch": 0.19,
"grad_norm": 5.740784168243408,
"learning_rate": 6.339393939393939e-07,
"loss": 0.6834,
"step": 3730
},
{
"epoch": 0.19,
"grad_norm": 15.918974876403809,
"learning_rate": 6.329292929292928e-07,
"loss": 0.6303,
"step": 3740
},
{
"epoch": 0.19,
"grad_norm": 5.351881980895996,
"learning_rate": 6.319191919191919e-07,
"loss": 0.3967,
"step": 3750
},
{
"epoch": 0.19,
"grad_norm": 76.57394409179688,
"learning_rate": 6.309090909090908e-07,
"loss": 0.5537,
"step": 3760
},
{
"epoch": 0.19,
"grad_norm": 17.062864303588867,
"learning_rate": 6.298989898989898e-07,
"loss": 0.6029,
"step": 3770
},
{
"epoch": 0.19,
"grad_norm": 39.9703254699707,
"learning_rate": 6.288888888888889e-07,
"loss": 0.3583,
"step": 3780
},
{
"epoch": 0.19,
"grad_norm": 61.81090545654297,
"learning_rate": 6.278787878787879e-07,
"loss": 0.5372,
"step": 3790
},
{
"epoch": 0.19,
"grad_norm": 25.4251708984375,
"learning_rate": 6.268686868686869e-07,
"loss": 0.4157,
"step": 3800
},
{
"epoch": 0.19,
"grad_norm": 3.276371479034424,
"learning_rate": 6.258585858585859e-07,
"loss": 0.6531,
"step": 3810
},
{
"epoch": 0.19,
"grad_norm": 47.67403030395508,
"learning_rate": 6.248484848484848e-07,
"loss": 0.6813,
"step": 3820
},
{
"epoch": 0.19,
"grad_norm": 13.192293167114258,
"learning_rate": 6.238383838383839e-07,
"loss": 0.4612,
"step": 3830
},
{
"epoch": 0.19,
"grad_norm": 0.580405056476593,
"learning_rate": 6.228282828282828e-07,
"loss": 0.3817,
"step": 3840
},
{
"epoch": 0.19,
"grad_norm": 17.856048583984375,
"learning_rate": 6.218181818181817e-07,
"loss": 0.7929,
"step": 3850
},
{
"epoch": 0.19,
"grad_norm": 33.620033264160156,
"learning_rate": 6.208080808080808e-07,
"loss": 0.5922,
"step": 3860
},
{
"epoch": 0.19,
"grad_norm": 33.94918441772461,
"learning_rate": 6.197979797979797e-07,
"loss": 0.4963,
"step": 3870
},
{
"epoch": 0.19,
"grad_norm": 15.053759574890137,
"learning_rate": 6.187878787878788e-07,
"loss": 0.6975,
"step": 3880
},
{
"epoch": 0.19,
"grad_norm": 44.79494857788086,
"learning_rate": 6.177777777777777e-07,
"loss": 0.6962,
"step": 3890
},
{
"epoch": 0.2,
"grad_norm": 9.810379028320312,
"learning_rate": 6.167676767676767e-07,
"loss": 0.3916,
"step": 3900
},
{
"epoch": 0.2,
"grad_norm": 54.83355712890625,
"learning_rate": 6.157575757575757e-07,
"loss": 0.6929,
"step": 3910
},
{
"epoch": 0.2,
"grad_norm": 20.908220291137695,
"learning_rate": 6.147474747474747e-07,
"loss": 0.7343,
"step": 3920
},
{
"epoch": 0.2,
"grad_norm": 48.0946044921875,
"learning_rate": 6.137373737373736e-07,
"loss": 0.5088,
"step": 3930
},
{
"epoch": 0.2,
"grad_norm": 37.02779006958008,
"learning_rate": 6.127272727272727e-07,
"loss": 0.5441,
"step": 3940
},
{
"epoch": 0.2,
"grad_norm": 16.095773696899414,
"learning_rate": 6.117171717171717e-07,
"loss": 0.7542,
"step": 3950
},
{
"epoch": 0.2,
"grad_norm": 48.33674621582031,
"learning_rate": 6.107070707070708e-07,
"loss": 0.4938,
"step": 3960
},
{
"epoch": 0.2,
"grad_norm": 33.02685546875,
"learning_rate": 6.096969696969697e-07,
"loss": 0.6449,
"step": 3970
},
{
"epoch": 0.2,
"grad_norm": 18.588003158569336,
"learning_rate": 6.086868686868687e-07,
"loss": 0.3599,
"step": 3980
},
{
"epoch": 0.2,
"grad_norm": 46.80269241333008,
"learning_rate": 6.076767676767677e-07,
"loss": 0.5536,
"step": 3990
},
{
"epoch": 0.2,
"grad_norm": 29.706817626953125,
"learning_rate": 6.066666666666666e-07,
"loss": 0.6733,
"step": 4000
},
{
"epoch": 0.2,
"eval_loss": 0.5796153545379639,
"eval_runtime": 272.8278,
"eval_samples_per_second": 3.665,
"eval_steps_per_second": 3.665,
"step": 4000
},
{
"epoch": 0.2,
"grad_norm": 35.10763168334961,
"learning_rate": 6.056565656565657e-07,
"loss": 0.4982,
"step": 4010
},
{
"epoch": 0.2,
"grad_norm": 36.585968017578125,
"learning_rate": 6.046464646464646e-07,
"loss": 0.5096,
"step": 4020
},
{
"epoch": 0.2,
"grad_norm": 26.760963439941406,
"learning_rate": 6.036363636363636e-07,
"loss": 0.4632,
"step": 4030
},
{
"epoch": 0.2,
"grad_norm": 65.30345916748047,
"learning_rate": 6.026262626262626e-07,
"loss": 0.673,
"step": 4040
},
{
"epoch": 0.2,
"grad_norm": 34.205318450927734,
"learning_rate": 6.016161616161616e-07,
"loss": 0.5949,
"step": 4050
},
{
"epoch": 0.2,
"grad_norm": 8.336762428283691,
"learning_rate": 6.006060606060605e-07,
"loss": 0.5725,
"step": 4060
},
{
"epoch": 0.2,
"grad_norm": 23.612590789794922,
"learning_rate": 5.995959595959596e-07,
"loss": 0.5402,
"step": 4070
},
{
"epoch": 0.2,
"grad_norm": 4.146045207977295,
"learning_rate": 5.985858585858585e-07,
"loss": 0.5696,
"step": 4080
},
{
"epoch": 0.2,
"grad_norm": 18.502138137817383,
"learning_rate": 5.975757575757575e-07,
"loss": 0.551,
"step": 4090
},
{
"epoch": 0.2,
"grad_norm": 48.672119140625,
"learning_rate": 5.965656565656565e-07,
"loss": 0.713,
"step": 4100
},
{
"epoch": 0.21,
"grad_norm": 0.011800971813499928,
"learning_rate": 5.955555555555555e-07,
"loss": 0.5925,
"step": 4110
},
{
"epoch": 0.21,
"grad_norm": 48.50733947753906,
"learning_rate": 5.945454545454546e-07,
"loss": 0.6687,
"step": 4120
},
{
"epoch": 0.21,
"grad_norm": 0.01216745562851429,
"learning_rate": 5.935353535353536e-07,
"loss": 0.6491,
"step": 4130
},
{
"epoch": 0.21,
"grad_norm": 34.22660446166992,
"learning_rate": 5.925252525252525e-07,
"loss": 0.6262,
"step": 4140
},
{
"epoch": 0.21,
"grad_norm": 32.8122444152832,
"learning_rate": 5.915151515151515e-07,
"loss": 0.8408,
"step": 4150
},
{
"epoch": 0.21,
"grad_norm": 23.037616729736328,
"learning_rate": 5.905050505050505e-07,
"loss": 0.6201,
"step": 4160
},
{
"epoch": 0.21,
"grad_norm": 6.8174920082092285,
"learning_rate": 5.894949494949495e-07,
"loss": 0.6822,
"step": 4170
},
{
"epoch": 0.21,
"grad_norm": 3.4964330196380615,
"learning_rate": 5.884848484848485e-07,
"loss": 0.6287,
"step": 4180
},
{
"epoch": 0.21,
"grad_norm": 25.050561904907227,
"learning_rate": 5.874747474747474e-07,
"loss": 0.7394,
"step": 4190
},
{
"epoch": 0.21,
"grad_norm": 40.42102813720703,
"learning_rate": 5.864646464646465e-07,
"loss": 0.7008,
"step": 4200
},
{
"epoch": 0.21,
"grad_norm": 18.322233200073242,
"learning_rate": 5.854545454545454e-07,
"loss": 0.4913,
"step": 4210
},
{
"epoch": 0.21,
"grad_norm": 19.939029693603516,
"learning_rate": 5.844444444444444e-07,
"loss": 0.6533,
"step": 4220
},
{
"epoch": 0.21,
"grad_norm": 24.60533332824707,
"learning_rate": 5.834343434343434e-07,
"loss": 0.623,
"step": 4230
},
{
"epoch": 0.21,
"grad_norm": 20.365371704101562,
"learning_rate": 5.824242424242424e-07,
"loss": 0.7523,
"step": 4240
},
{
"epoch": 0.21,
"grad_norm": 50.654842376708984,
"learning_rate": 5.814141414141414e-07,
"loss": 0.5586,
"step": 4250
},
{
"epoch": 0.21,
"grad_norm": 28.067501068115234,
"learning_rate": 5.804040404040404e-07,
"loss": 0.8267,
"step": 4260
},
{
"epoch": 0.21,
"grad_norm": 64.40141296386719,
"learning_rate": 5.793939393939393e-07,
"loss": 0.5589,
"step": 4270
},
{
"epoch": 0.21,
"grad_norm": 58.74480438232422,
"learning_rate": 5.783838383838383e-07,
"loss": 0.508,
"step": 4280
},
{
"epoch": 0.21,
"grad_norm": 21.915971755981445,
"learning_rate": 5.773737373737374e-07,
"loss": 0.2656,
"step": 4290
},
{
"epoch": 0.21,
"grad_norm": 20.83277130126953,
"learning_rate": 5.763636363636363e-07,
"loss": 0.591,
"step": 4300
},
{
"epoch": 0.22,
"grad_norm": 21.14466094970703,
"learning_rate": 5.753535353535354e-07,
"loss": 0.5395,
"step": 4310
},
{
"epoch": 0.22,
"grad_norm": 0.9965362548828125,
"learning_rate": 5.743434343434343e-07,
"loss": 0.4719,
"step": 4320
},
{
"epoch": 0.22,
"grad_norm": 59.90948486328125,
"learning_rate": 5.733333333333334e-07,
"loss": 0.4387,
"step": 4330
},
{
"epoch": 0.22,
"grad_norm": 30.61522674560547,
"learning_rate": 5.723232323232323e-07,
"loss": 0.8272,
"step": 4340
},
{
"epoch": 0.22,
"grad_norm": 42.94061279296875,
"learning_rate": 5.713131313131313e-07,
"loss": 0.6145,
"step": 4350
},
{
"epoch": 0.22,
"grad_norm": 32.636966705322266,
"learning_rate": 5.703030303030303e-07,
"loss": 0.6995,
"step": 4360
},
{
"epoch": 0.22,
"grad_norm": 17.99488067626953,
"learning_rate": 5.692929292929293e-07,
"loss": 0.7309,
"step": 4370
},
{
"epoch": 0.22,
"grad_norm": 119.16413116455078,
"learning_rate": 5.682828282828282e-07,
"loss": 0.3749,
"step": 4380
},
{
"epoch": 0.22,
"grad_norm": 42.35875701904297,
"learning_rate": 5.672727272727273e-07,
"loss": 0.5476,
"step": 4390
},
{
"epoch": 0.22,
"grad_norm": 50.271358489990234,
"learning_rate": 5.662626262626262e-07,
"loss": 0.4938,
"step": 4400
},
{
"epoch": 0.22,
"grad_norm": 66.81938171386719,
"learning_rate": 5.652525252525253e-07,
"loss": 0.5849,
"step": 4410
},
{
"epoch": 0.22,
"grad_norm": 2.026270627975464,
"learning_rate": 5.642424242424242e-07,
"loss": 0.6664,
"step": 4420
},
{
"epoch": 0.22,
"grad_norm": 46.648406982421875,
"learning_rate": 5.632323232323231e-07,
"loss": 0.4565,
"step": 4430
},
{
"epoch": 0.22,
"grad_norm": 16.63812828063965,
"learning_rate": 5.622222222222222e-07,
"loss": 0.6162,
"step": 4440
},
{
"epoch": 0.22,
"grad_norm": 38.91732406616211,
"learning_rate": 5.612121212121211e-07,
"loss": 0.5521,
"step": 4450
},
{
"epoch": 0.22,
"grad_norm": 60.01820373535156,
"learning_rate": 5.602020202020202e-07,
"loss": 0.5168,
"step": 4460
},
{
"epoch": 0.22,
"grad_norm": 41.567169189453125,
"learning_rate": 5.591919191919192e-07,
"loss": 0.793,
"step": 4470
},
{
"epoch": 0.22,
"grad_norm": 28.133264541625977,
"learning_rate": 5.581818181818182e-07,
"loss": 0.3861,
"step": 4480
},
{
"epoch": 0.22,
"grad_norm": 62.19641876220703,
"learning_rate": 5.571717171717172e-07,
"loss": 0.6352,
"step": 4490
},
{
"epoch": 0.23,
"grad_norm": 57.30091857910156,
"learning_rate": 5.561616161616162e-07,
"loss": 0.69,
"step": 4500
},
{
"epoch": 0.23,
"grad_norm": 43.78639602661133,
"learning_rate": 5.551515151515151e-07,
"loss": 0.7222,
"step": 4510
},
{
"epoch": 0.23,
"grad_norm": 32.01860809326172,
"learning_rate": 5.541414141414142e-07,
"loss": 0.7186,
"step": 4520
},
{
"epoch": 0.23,
"grad_norm": 23.97758674621582,
"learning_rate": 5.531313131313131e-07,
"loss": 0.6255,
"step": 4530
},
{
"epoch": 0.23,
"grad_norm": 50.89016342163086,
"learning_rate": 5.521212121212121e-07,
"loss": 0.5228,
"step": 4540
},
{
"epoch": 0.23,
"grad_norm": 39.55519485473633,
"learning_rate": 5.511111111111111e-07,
"loss": 0.5833,
"step": 4550
},
{
"epoch": 0.23,
"grad_norm": 25.74945640563965,
"learning_rate": 5.5010101010101e-07,
"loss": 0.5973,
"step": 4560
},
{
"epoch": 0.23,
"grad_norm": 31.337507247924805,
"learning_rate": 5.490909090909091e-07,
"loss": 0.5308,
"step": 4570
},
{
"epoch": 0.23,
"grad_norm": 54.57097625732422,
"learning_rate": 5.48080808080808e-07,
"loss": 0.7696,
"step": 4580
},
{
"epoch": 0.23,
"grad_norm": 49.550113677978516,
"learning_rate": 5.47070707070707e-07,
"loss": 0.6118,
"step": 4590
},
{
"epoch": 0.23,
"grad_norm": 50.0933723449707,
"learning_rate": 5.46060606060606e-07,
"loss": 0.5375,
"step": 4600
},
{
"epoch": 0.23,
"grad_norm": 10.150845527648926,
"learning_rate": 5.45050505050505e-07,
"loss": 0.5641,
"step": 4610
},
{
"epoch": 0.23,
"grad_norm": 31.044645309448242,
"learning_rate": 5.440404040404039e-07,
"loss": 0.795,
"step": 4620
},
{
"epoch": 0.23,
"grad_norm": 0.17512640357017517,
"learning_rate": 5.430303030303031e-07,
"loss": 0.6226,
"step": 4630
},
{
"epoch": 0.23,
"grad_norm": 9.986194610595703,
"learning_rate": 5.42020202020202e-07,
"loss": 0.5268,
"step": 4640
},
{
"epoch": 0.23,
"grad_norm": 11.464401245117188,
"learning_rate": 5.410101010101011e-07,
"loss": 0.6265,
"step": 4650
},
{
"epoch": 0.23,
"grad_norm": 23.791200637817383,
"learning_rate": 5.4e-07,
"loss": 0.4169,
"step": 4660
},
{
"epoch": 0.23,
"grad_norm": 15.029647827148438,
"learning_rate": 5.38989898989899e-07,
"loss": 0.8112,
"step": 4670
},
{
"epoch": 0.23,
"grad_norm": 45.67643356323242,
"learning_rate": 5.37979797979798e-07,
"loss": 0.6152,
"step": 4680
},
{
"epoch": 0.23,
"grad_norm": 21.600475311279297,
"learning_rate": 5.36969696969697e-07,
"loss": 0.3425,
"step": 4690
},
{
"epoch": 0.23,
"grad_norm": 86.80847930908203,
"learning_rate": 5.359595959595959e-07,
"loss": 0.6447,
"step": 4700
},
{
"epoch": 0.24,
"grad_norm": 28.537168502807617,
"learning_rate": 5.34949494949495e-07,
"loss": 0.9382,
"step": 4710
},
{
"epoch": 0.24,
"grad_norm": 7.84658145904541,
"learning_rate": 5.339393939393939e-07,
"loss": 0.4908,
"step": 4720
},
{
"epoch": 0.24,
"grad_norm": 137.72291564941406,
"learning_rate": 5.329292929292929e-07,
"loss": 0.5919,
"step": 4730
},
{
"epoch": 0.24,
"grad_norm": 28.025161743164062,
"learning_rate": 5.319191919191919e-07,
"loss": 0.45,
"step": 4740
},
{
"epoch": 0.24,
"grad_norm": 23.230506896972656,
"learning_rate": 5.309090909090908e-07,
"loss": 0.5548,
"step": 4750
},
{
"epoch": 0.24,
"grad_norm": 1.3400743007659912,
"learning_rate": 5.298989898989899e-07,
"loss": 0.3073,
"step": 4760
},
{
"epoch": 0.24,
"grad_norm": 61.306583404541016,
"learning_rate": 5.288888888888888e-07,
"loss": 0.6393,
"step": 4770
},
{
"epoch": 0.24,
"grad_norm": 57.7855339050293,
"learning_rate": 5.278787878787878e-07,
"loss": 1.1223,
"step": 4780
},
{
"epoch": 0.24,
"grad_norm": 31.280473709106445,
"learning_rate": 5.268686868686868e-07,
"loss": 0.5766,
"step": 4790
},
{
"epoch": 0.24,
"grad_norm": 43.74176788330078,
"learning_rate": 5.258585858585859e-07,
"loss": 0.656,
"step": 4800
},
{
"epoch": 0.24,
"grad_norm": 9.490304946899414,
"learning_rate": 5.248484848484849e-07,
"loss": 0.6825,
"step": 4810
},
{
"epoch": 0.24,
"grad_norm": 13.078840255737305,
"learning_rate": 5.238383838383839e-07,
"loss": 0.3547,
"step": 4820
},
{
"epoch": 0.24,
"grad_norm": 6.858570575714111,
"learning_rate": 5.228282828282828e-07,
"loss": 0.4382,
"step": 4830
},
{
"epoch": 0.24,
"grad_norm": 40.836524963378906,
"learning_rate": 5.218181818181819e-07,
"loss": 0.737,
"step": 4840
},
{
"epoch": 0.24,
"grad_norm": 21.203798294067383,
"learning_rate": 5.208080808080808e-07,
"loss": 0.717,
"step": 4850
},
{
"epoch": 0.24,
"grad_norm": 25.54405403137207,
"learning_rate": 5.197979797979797e-07,
"loss": 0.5552,
"step": 4860
},
{
"epoch": 0.24,
"grad_norm": 24.849031448364258,
"learning_rate": 5.187878787878788e-07,
"loss": 0.6207,
"step": 4870
},
{
"epoch": 0.24,
"grad_norm": 59.22574234008789,
"learning_rate": 5.177777777777777e-07,
"loss": 0.4548,
"step": 4880
},
{
"epoch": 0.24,
"grad_norm": 49.025386810302734,
"learning_rate": 5.167676767676768e-07,
"loss": 0.4617,
"step": 4890
},
{
"epoch": 0.24,
"grad_norm": 18.983474731445312,
"learning_rate": 5.157575757575757e-07,
"loss": 0.6261,
"step": 4900
},
{
"epoch": 0.25,
"grad_norm": 24.251028060913086,
"learning_rate": 5.147474747474747e-07,
"loss": 0.7819,
"step": 4910
},
{
"epoch": 0.25,
"grad_norm": 33.367332458496094,
"learning_rate": 5.137373737373737e-07,
"loss": 0.7485,
"step": 4920
},
{
"epoch": 0.25,
"grad_norm": 16.02358627319336,
"learning_rate": 5.127272727272727e-07,
"loss": 0.4568,
"step": 4930
},
{
"epoch": 0.25,
"grad_norm": 21.30974578857422,
"learning_rate": 5.117171717171716e-07,
"loss": 0.7856,
"step": 4940
},
{
"epoch": 0.25,
"grad_norm": 6.685999393463135,
"learning_rate": 5.107070707070707e-07,
"loss": 0.2796,
"step": 4950
},
{
"epoch": 0.25,
"grad_norm": 9.83161735534668,
"learning_rate": 5.096969696969696e-07,
"loss": 0.4994,
"step": 4960
},
{
"epoch": 0.25,
"grad_norm": 28.18449592590332,
"learning_rate": 5.086868686868687e-07,
"loss": 0.4694,
"step": 4970
},
{
"epoch": 0.25,
"grad_norm": 12.509058952331543,
"learning_rate": 5.076767676767677e-07,
"loss": 0.5545,
"step": 4980
},
{
"epoch": 0.25,
"grad_norm": 10.555543899536133,
"learning_rate": 5.066666666666667e-07,
"loss": 0.3085,
"step": 4990
},
{
"epoch": 0.25,
"grad_norm": 30.451934814453125,
"learning_rate": 5.056565656565657e-07,
"loss": 0.4184,
"step": 5000
},
{
"epoch": 0.25,
"eval_loss": 0.6317839622497559,
"eval_runtime": 273.6525,
"eval_samples_per_second": 3.654,
"eval_steps_per_second": 3.654,
"step": 5000
},
{
"epoch": 0.25,
"grad_norm": 16.75673484802246,
"learning_rate": 5.046464646464646e-07,
"loss": 0.6444,
"step": 5010
},
{
"epoch": 0.25,
"grad_norm": 11.304834365844727,
"learning_rate": 5.036363636363636e-07,
"loss": 0.422,
"step": 5020
},
{
"epoch": 0.25,
"grad_norm": 18.923059463500977,
"learning_rate": 5.026262626262626e-07,
"loss": 0.744,
"step": 5030
},
{
"epoch": 0.25,
"grad_norm": 26.73674964904785,
"learning_rate": 5.016161616161616e-07,
"loss": 0.3386,
"step": 5040
},
{
"epoch": 0.25,
"grad_norm": 1.0594570636749268,
"learning_rate": 5.006060606060606e-07,
"loss": 0.6157,
"step": 5050
},
{
"epoch": 0.25,
"grad_norm": 20.98480796813965,
"learning_rate": 4.995959595959596e-07,
"loss": 0.4157,
"step": 5060
},
{
"epoch": 0.25,
"grad_norm": 47.005348205566406,
"learning_rate": 4.985858585858585e-07,
"loss": 0.7854,
"step": 5070
},
{
"epoch": 0.25,
"grad_norm": 18.866960525512695,
"learning_rate": 4.975757575757576e-07,
"loss": 0.6145,
"step": 5080
},
{
"epoch": 0.25,
"grad_norm": 12.678258895874023,
"learning_rate": 4.965656565656565e-07,
"loss": 0.3665,
"step": 5090
},
{
"epoch": 0.26,
"grad_norm": 56.87771987915039,
"learning_rate": 4.955555555555556e-07,
"loss": 0.6198,
"step": 5100
},
{
"epoch": 0.26,
"grad_norm": 7.225916862487793,
"learning_rate": 4.945454545454545e-07,
"loss": 0.519,
"step": 5110
},
{
"epoch": 0.26,
"grad_norm": 23.896451950073242,
"learning_rate": 4.935353535353536e-07,
"loss": 0.5161,
"step": 5120
},
{
"epoch": 0.26,
"grad_norm": 6.715981483459473,
"learning_rate": 4.925252525252525e-07,
"loss": 0.4298,
"step": 5130
},
{
"epoch": 0.26,
"grad_norm": 41.35715866088867,
"learning_rate": 4.915151515151514e-07,
"loss": 0.83,
"step": 5140
},
{
"epoch": 0.26,
"grad_norm": 16.183509826660156,
"learning_rate": 4.905050505050505e-07,
"loss": 0.5437,
"step": 5150
},
{
"epoch": 0.26,
"grad_norm": 35.6097412109375,
"learning_rate": 4.894949494949494e-07,
"loss": 0.7177,
"step": 5160
},
{
"epoch": 0.26,
"grad_norm": 40.16917037963867,
"learning_rate": 4.884848484848485e-07,
"loss": 0.7704,
"step": 5170
},
{
"epoch": 0.26,
"grad_norm": 44.37532424926758,
"learning_rate": 4.874747474747474e-07,
"loss": 0.7467,
"step": 5180
},
{
"epoch": 0.26,
"grad_norm": 48.34253692626953,
"learning_rate": 4.864646464646465e-07,
"loss": 0.5483,
"step": 5190
},
{
"epoch": 0.26,
"grad_norm": 24.17354965209961,
"learning_rate": 4.854545454545454e-07,
"loss": 0.4913,
"step": 5200
},
{
"epoch": 0.26,
"grad_norm": 35.74309539794922,
"learning_rate": 4.844444444444445e-07,
"loss": 0.5399,
"step": 5210
},
{
"epoch": 0.26,
"grad_norm": 27.378297805786133,
"learning_rate": 4.834343434343434e-07,
"loss": 0.6222,
"step": 5220
},
{
"epoch": 0.26,
"grad_norm": 46.51097869873047,
"learning_rate": 4.824242424242424e-07,
"loss": 0.4005,
"step": 5230
},
{
"epoch": 0.26,
"grad_norm": 53.12395477294922,
"learning_rate": 4.814141414141414e-07,
"loss": 0.6066,
"step": 5240
},
{
"epoch": 0.26,
"grad_norm": 16.78499412536621,
"learning_rate": 4.804040404040404e-07,
"loss": 0.6476,
"step": 5250
},
{
"epoch": 0.26,
"grad_norm": 24.126611709594727,
"learning_rate": 4.793939393939393e-07,
"loss": 0.5924,
"step": 5260
},
{
"epoch": 0.26,
"grad_norm": 93.89410400390625,
"learning_rate": 4.783838383838384e-07,
"loss": 0.596,
"step": 5270
},
{
"epoch": 0.26,
"grad_norm": 9.708739280700684,
"learning_rate": 4.773737373737374e-07,
"loss": 0.4209,
"step": 5280
},
{
"epoch": 0.26,
"grad_norm": 13.41931438446045,
"learning_rate": 4.7636363636363635e-07,
"loss": 0.5598,
"step": 5290
},
{
"epoch": 0.27,
"grad_norm": 41.988311767578125,
"learning_rate": 4.7535353535353534e-07,
"loss": 0.7958,
"step": 5300
},
{
"epoch": 0.27,
"grad_norm": 61.960548400878906,
"learning_rate": 4.7434343434343434e-07,
"loss": 0.643,
"step": 5310
},
{
"epoch": 0.27,
"grad_norm": 23.325374603271484,
"learning_rate": 4.733333333333333e-07,
"loss": 0.7438,
"step": 5320
},
{
"epoch": 0.27,
"grad_norm": 32.468814849853516,
"learning_rate": 4.723232323232323e-07,
"loss": 0.4068,
"step": 5330
},
{
"epoch": 0.27,
"grad_norm": 0.005862310528755188,
"learning_rate": 4.713131313131313e-07,
"loss": 0.5128,
"step": 5340
},
{
"epoch": 0.27,
"grad_norm": 4.65062141418457,
"learning_rate": 4.703030303030303e-07,
"loss": 0.3018,
"step": 5350
},
{
"epoch": 0.27,
"grad_norm": 18.1551456451416,
"learning_rate": 4.6929292929292927e-07,
"loss": 0.5412,
"step": 5360
},
{
"epoch": 0.27,
"grad_norm": 21.883535385131836,
"learning_rate": 4.6828282828282827e-07,
"loss": 0.3498,
"step": 5370
},
{
"epoch": 0.27,
"grad_norm": 3.3442208766937256,
"learning_rate": 4.6727272727272727e-07,
"loss": 0.3586,
"step": 5380
},
{
"epoch": 0.27,
"grad_norm": 10.725494384765625,
"learning_rate": 4.6626262626262626e-07,
"loss": 0.5572,
"step": 5390
},
{
"epoch": 0.27,
"grad_norm": 54.03162384033203,
"learning_rate": 4.652525252525252e-07,
"loss": 0.699,
"step": 5400
},
{
"epoch": 0.27,
"grad_norm": 66.64715576171875,
"learning_rate": 4.642424242424242e-07,
"loss": 0.6599,
"step": 5410
},
{
"epoch": 0.27,
"grad_norm": 60.05572509765625,
"learning_rate": 4.632323232323232e-07,
"loss": 0.8139,
"step": 5420
},
{
"epoch": 0.27,
"grad_norm": 81.59327697753906,
"learning_rate": 4.622222222222222e-07,
"loss": 0.6494,
"step": 5430
},
{
"epoch": 0.27,
"grad_norm": 10.984243392944336,
"learning_rate": 4.6121212121212114e-07,
"loss": 0.3774,
"step": 5440
},
{
"epoch": 0.27,
"grad_norm": 9.627668380737305,
"learning_rate": 4.602020202020202e-07,
"loss": 0.4846,
"step": 5450
},
{
"epoch": 0.27,
"grad_norm": 34.14588928222656,
"learning_rate": 4.591919191919192e-07,
"loss": 0.6254,
"step": 5460
},
{
"epoch": 0.27,
"grad_norm": 23.76165199279785,
"learning_rate": 4.581818181818182e-07,
"loss": 0.6575,
"step": 5470
},
{
"epoch": 0.27,
"grad_norm": 21.314247131347656,
"learning_rate": 4.5717171717171713e-07,
"loss": 0.666,
"step": 5480
},
{
"epoch": 0.27,
"grad_norm": 16.340349197387695,
"learning_rate": 4.561616161616161e-07,
"loss": 0.7248,
"step": 5490
},
{
"epoch": 0.28,
"grad_norm": 12.984785079956055,
"learning_rate": 4.551515151515151e-07,
"loss": 0.5881,
"step": 5500
},
{
"epoch": 0.28,
"grad_norm": 32.25109100341797,
"learning_rate": 4.541414141414141e-07,
"loss": 0.6264,
"step": 5510
},
{
"epoch": 0.28,
"grad_norm": 58.724952697753906,
"learning_rate": 4.5313131313131306e-07,
"loss": 0.558,
"step": 5520
},
{
"epoch": 0.28,
"grad_norm": 42.230377197265625,
"learning_rate": 4.521212121212121e-07,
"loss": 0.7187,
"step": 5530
},
{
"epoch": 0.28,
"grad_norm": 48.40780258178711,
"learning_rate": 4.511111111111111e-07,
"loss": 0.6857,
"step": 5540
},
{
"epoch": 0.28,
"grad_norm": 16.174091339111328,
"learning_rate": 4.501010101010101e-07,
"loss": 0.4787,
"step": 5550
},
{
"epoch": 0.28,
"grad_norm": 19.75393295288086,
"learning_rate": 4.4909090909090905e-07,
"loss": 0.4903,
"step": 5560
},
{
"epoch": 0.28,
"grad_norm": 22.77608299255371,
"learning_rate": 4.4808080808080805e-07,
"loss": 0.8591,
"step": 5570
},
{
"epoch": 0.28,
"grad_norm": 28.29513931274414,
"learning_rate": 4.4707070707070704e-07,
"loss": 0.3831,
"step": 5580
},
{
"epoch": 0.28,
"grad_norm": 0.01570923626422882,
"learning_rate": 4.4606060606060604e-07,
"loss": 0.4993,
"step": 5590
},
{
"epoch": 0.28,
"grad_norm": 42.494590759277344,
"learning_rate": 4.45050505050505e-07,
"loss": 0.6979,
"step": 5600
},
{
"epoch": 0.28,
"grad_norm": 11.708600044250488,
"learning_rate": 4.44040404040404e-07,
"loss": 0.4699,
"step": 5610
},
{
"epoch": 0.28,
"grad_norm": 84.38910675048828,
"learning_rate": 4.4303030303030303e-07,
"loss": 0.5278,
"step": 5620
},
{
"epoch": 0.28,
"grad_norm": 20.805999755859375,
"learning_rate": 4.4202020202020203e-07,
"loss": 0.5134,
"step": 5630
},
{
"epoch": 0.28,
"grad_norm": 21.2529354095459,
"learning_rate": 4.4101010101010097e-07,
"loss": 0.5041,
"step": 5640
},
{
"epoch": 0.28,
"grad_norm": 66.35508728027344,
"learning_rate": 4.3999999999999997e-07,
"loss": 0.5179,
"step": 5650
},
{
"epoch": 0.28,
"grad_norm": 87.20722961425781,
"learning_rate": 4.3898989898989897e-07,
"loss": 0.743,
"step": 5660
},
{
"epoch": 0.28,
"grad_norm": 61.29191589355469,
"learning_rate": 4.3797979797979796e-07,
"loss": 0.6709,
"step": 5670
},
{
"epoch": 0.28,
"grad_norm": 14.595233917236328,
"learning_rate": 4.369696969696969e-07,
"loss": 0.4567,
"step": 5680
},
{
"epoch": 0.28,
"grad_norm": 49.612754821777344,
"learning_rate": 4.359595959595959e-07,
"loss": 0.6162,
"step": 5690
},
{
"epoch": 0.28,
"grad_norm": 64.8648681640625,
"learning_rate": 4.3494949494949495e-07,
"loss": 0.4786,
"step": 5700
},
{
"epoch": 0.29,
"grad_norm": 19.547813415527344,
"learning_rate": 4.3393939393939395e-07,
"loss": 0.5041,
"step": 5710
},
{
"epoch": 0.29,
"grad_norm": 19.075836181640625,
"learning_rate": 4.3292929292929295e-07,
"loss": 0.4912,
"step": 5720
},
{
"epoch": 0.29,
"grad_norm": 2.2977592945098877,
"learning_rate": 4.319191919191919e-07,
"loss": 0.6704,
"step": 5730
},
{
"epoch": 0.29,
"grad_norm": 13.33616828918457,
"learning_rate": 4.309090909090909e-07,
"loss": 0.3042,
"step": 5740
},
{
"epoch": 0.29,
"grad_norm": 45.46590042114258,
"learning_rate": 4.298989898989899e-07,
"loss": 0.8667,
"step": 5750
},
{
"epoch": 0.29,
"grad_norm": 26.082550048828125,
"learning_rate": 4.2888888888888883e-07,
"loss": 0.5301,
"step": 5760
},
{
"epoch": 0.29,
"grad_norm": 18.556835174560547,
"learning_rate": 4.278787878787878e-07,
"loss": 0.5641,
"step": 5770
},
{
"epoch": 0.29,
"grad_norm": 33.3296012878418,
"learning_rate": 4.268686868686868e-07,
"loss": 0.9849,
"step": 5780
},
{
"epoch": 0.29,
"grad_norm": 8.51718807220459,
"learning_rate": 4.2585858585858587e-07,
"loss": 0.678,
"step": 5790
},
{
"epoch": 0.29,
"grad_norm": 48.53915786743164,
"learning_rate": 4.2484848484848487e-07,
"loss": 0.9045,
"step": 5800
},
{
"epoch": 0.29,
"grad_norm": 25.81182098388672,
"learning_rate": 4.238383838383838e-07,
"loss": 0.7647,
"step": 5810
},
{
"epoch": 0.29,
"grad_norm": 19.481019973754883,
"learning_rate": 4.228282828282828e-07,
"loss": 0.6162,
"step": 5820
},
{
"epoch": 0.29,
"grad_norm": 6.070164203643799,
"learning_rate": 4.218181818181818e-07,
"loss": 0.4276,
"step": 5830
},
{
"epoch": 0.29,
"grad_norm": 45.53562545776367,
"learning_rate": 4.208080808080808e-07,
"loss": 0.5479,
"step": 5840
},
{
"epoch": 0.29,
"grad_norm": 36.35445022583008,
"learning_rate": 4.1979797979797975e-07,
"loss": 0.4958,
"step": 5850
},
{
"epoch": 0.29,
"grad_norm": 0.0024314168840646744,
"learning_rate": 4.1878787878787875e-07,
"loss": 0.5411,
"step": 5860
},
{
"epoch": 0.29,
"grad_norm": 29.073923110961914,
"learning_rate": 4.177777777777778e-07,
"loss": 0.588,
"step": 5870
},
{
"epoch": 0.29,
"grad_norm": 27.702180862426758,
"learning_rate": 4.167676767676768e-07,
"loss": 0.5949,
"step": 5880
},
{
"epoch": 0.29,
"grad_norm": 31.89980697631836,
"learning_rate": 4.1575757575757574e-07,
"loss": 0.9579,
"step": 5890
},
{
"epoch": 0.29,
"grad_norm": 16.54688262939453,
"learning_rate": 4.1474747474747473e-07,
"loss": 0.483,
"step": 5900
},
{
"epoch": 0.3,
"grad_norm": 17.019641876220703,
"learning_rate": 4.1373737373737373e-07,
"loss": 0.5992,
"step": 5910
},
{
"epoch": 0.3,
"grad_norm": 60.383583068847656,
"learning_rate": 4.1272727272727273e-07,
"loss": 0.6877,
"step": 5920
},
{
"epoch": 0.3,
"grad_norm": 31.626358032226562,
"learning_rate": 4.1171717171717167e-07,
"loss": 0.3786,
"step": 5930
},
{
"epoch": 0.3,
"grad_norm": 41.01628112792969,
"learning_rate": 4.1070707070707067e-07,
"loss": 1.0079,
"step": 5940
},
{
"epoch": 0.3,
"grad_norm": 5.4029645919799805,
"learning_rate": 4.0969696969696966e-07,
"loss": 0.2839,
"step": 5950
},
{
"epoch": 0.3,
"grad_norm": 25.453466415405273,
"learning_rate": 4.086868686868687e-07,
"loss": 0.3568,
"step": 5960
},
{
"epoch": 0.3,
"grad_norm": 37.09002685546875,
"learning_rate": 4.0767676767676766e-07,
"loss": 0.6424,
"step": 5970
},
{
"epoch": 0.3,
"grad_norm": 72.52601623535156,
"learning_rate": 4.0666666666666666e-07,
"loss": 0.5851,
"step": 5980
},
{
"epoch": 0.3,
"grad_norm": 24.996829986572266,
"learning_rate": 4.0565656565656565e-07,
"loss": 0.3083,
"step": 5990
},
{
"epoch": 0.3,
"grad_norm": 6.386436939239502,
"learning_rate": 4.0464646464646465e-07,
"loss": 0.5214,
"step": 6000
},
{
"epoch": 0.3,
"eval_loss": 0.5321834683418274,
"eval_runtime": 272.7035,
"eval_samples_per_second": 3.667,
"eval_steps_per_second": 3.667,
"step": 6000
},
{
"epoch": 0.3,
"grad_norm": 55.56099319458008,
"learning_rate": 4.036363636363636e-07,
"loss": 0.5058,
"step": 6010
},
{
"epoch": 0.3,
"grad_norm": 35.558677673339844,
"learning_rate": 4.026262626262626e-07,
"loss": 0.4482,
"step": 6020
},
{
"epoch": 0.3,
"grad_norm": 44.231929779052734,
"learning_rate": 4.016161616161616e-07,
"loss": 0.5993,
"step": 6030
},
{
"epoch": 0.3,
"grad_norm": 27.5611629486084,
"learning_rate": 4.0060606060606064e-07,
"loss": 0.5536,
"step": 6040
},
{
"epoch": 0.3,
"grad_norm": 25.483076095581055,
"learning_rate": 3.995959595959596e-07,
"loss": 0.7658,
"step": 6050
},
{
"epoch": 0.3,
"grad_norm": 35.32095718383789,
"learning_rate": 3.985858585858586e-07,
"loss": 0.4792,
"step": 6060
},
{
"epoch": 0.3,
"grad_norm": 70.55352783203125,
"learning_rate": 3.975757575757576e-07,
"loss": 0.6017,
"step": 6070
},
{
"epoch": 0.3,
"grad_norm": 64.90217590332031,
"learning_rate": 3.9656565656565657e-07,
"loss": 0.8276,
"step": 6080
},
{
"epoch": 0.3,
"grad_norm": 25.38896369934082,
"learning_rate": 3.955555555555555e-07,
"loss": 0.5957,
"step": 6090
},
{
"epoch": 0.3,
"grad_norm": 35.307044982910156,
"learning_rate": 3.945454545454545e-07,
"loss": 0.5293,
"step": 6100
},
{
"epoch": 0.31,
"grad_norm": 28.193256378173828,
"learning_rate": 3.935353535353535e-07,
"loss": 0.5843,
"step": 6110
},
{
"epoch": 0.31,
"grad_norm": 21.146955490112305,
"learning_rate": 3.925252525252525e-07,
"loss": 0.496,
"step": 6120
},
{
"epoch": 0.31,
"grad_norm": 27.107250213623047,
"learning_rate": 3.915151515151515e-07,
"loss": 0.6787,
"step": 6130
},
{
"epoch": 0.31,
"grad_norm": 11.974008560180664,
"learning_rate": 3.905050505050505e-07,
"loss": 0.3089,
"step": 6140
},
{
"epoch": 0.31,
"grad_norm": 52.8520622253418,
"learning_rate": 3.894949494949495e-07,
"loss": 0.4683,
"step": 6150
},
{
"epoch": 0.31,
"grad_norm": 22.483945846557617,
"learning_rate": 3.884848484848485e-07,
"loss": 0.3507,
"step": 6160
},
{
"epoch": 0.31,
"grad_norm": 10.108546257019043,
"learning_rate": 3.8747474747474744e-07,
"loss": 0.5339,
"step": 6170
},
{
"epoch": 0.31,
"grad_norm": 13.516547203063965,
"learning_rate": 3.8646464646464643e-07,
"loss": 0.5166,
"step": 6180
},
{
"epoch": 0.31,
"grad_norm": 32.35124969482422,
"learning_rate": 3.8545454545454543e-07,
"loss": 0.4496,
"step": 6190
},
{
"epoch": 0.31,
"grad_norm": 21.446962356567383,
"learning_rate": 3.8444444444444443e-07,
"loss": 0.51,
"step": 6200
},
{
"epoch": 0.31,
"grad_norm": 46.2486572265625,
"learning_rate": 3.834343434343434e-07,
"loss": 0.7133,
"step": 6210
},
{
"epoch": 0.31,
"grad_norm": 59.068603515625,
"learning_rate": 3.824242424242424e-07,
"loss": 0.537,
"step": 6220
},
{
"epoch": 0.31,
"grad_norm": 33.21735763549805,
"learning_rate": 3.814141414141414e-07,
"loss": 0.6223,
"step": 6230
},
{
"epoch": 0.31,
"grad_norm": 33.43574905395508,
"learning_rate": 3.804040404040404e-07,
"loss": 0.5346,
"step": 6240
},
{
"epoch": 0.31,
"grad_norm": 40.06599044799805,
"learning_rate": 3.7939393939393936e-07,
"loss": 0.6537,
"step": 6250
},
{
"epoch": 0.31,
"grad_norm": 23.345809936523438,
"learning_rate": 3.7838383838383836e-07,
"loss": 0.4532,
"step": 6260
},
{
"epoch": 0.31,
"grad_norm": 23.654773712158203,
"learning_rate": 3.7737373737373735e-07,
"loss": 0.6201,
"step": 6270
},
{
"epoch": 0.31,
"grad_norm": 46.89964294433594,
"learning_rate": 3.7636363636363635e-07,
"loss": 0.6033,
"step": 6280
},
{
"epoch": 0.31,
"grad_norm": 41.876686096191406,
"learning_rate": 3.753535353535353e-07,
"loss": 0.3773,
"step": 6290
},
{
"epoch": 0.32,
"grad_norm": 18.634048461914062,
"learning_rate": 3.7434343434343434e-07,
"loss": 0.4174,
"step": 6300
},
{
"epoch": 0.32,
"grad_norm": 8.50839900970459,
"learning_rate": 3.7333333333333334e-07,
"loss": 0.6905,
"step": 6310
},
{
"epoch": 0.32,
"grad_norm": 39.878028869628906,
"learning_rate": 3.7232323232323234e-07,
"loss": 0.4222,
"step": 6320
},
{
"epoch": 0.32,
"grad_norm": 63.93856430053711,
"learning_rate": 3.713131313131313e-07,
"loss": 0.61,
"step": 6330
},
{
"epoch": 0.32,
"grad_norm": 0.07593978196382523,
"learning_rate": 3.703030303030303e-07,
"loss": 0.3598,
"step": 6340
},
{
"epoch": 0.32,
"grad_norm": 20.610122680664062,
"learning_rate": 3.692929292929293e-07,
"loss": 0.5141,
"step": 6350
},
{
"epoch": 0.32,
"grad_norm": 18.46574592590332,
"learning_rate": 3.6828282828282827e-07,
"loss": 0.5548,
"step": 6360
},
{
"epoch": 0.32,
"grad_norm": 54.231014251708984,
"learning_rate": 3.672727272727272e-07,
"loss": 0.8787,
"step": 6370
},
{
"epoch": 0.32,
"grad_norm": 69.66897583007812,
"learning_rate": 3.6626262626262627e-07,
"loss": 0.4716,
"step": 6380
},
{
"epoch": 0.32,
"grad_norm": 0.17907452583312988,
"learning_rate": 3.6525252525252526e-07,
"loss": 0.5845,
"step": 6390
},
{
"epoch": 0.32,
"grad_norm": 3.359571695327759,
"learning_rate": 3.6424242424242426e-07,
"loss": 0.5011,
"step": 6400
},
{
"epoch": 0.32,
"grad_norm": 25.506729125976562,
"learning_rate": 3.632323232323232e-07,
"loss": 0.5965,
"step": 6410
},
{
"epoch": 0.32,
"grad_norm": 14.554071426391602,
"learning_rate": 3.622222222222222e-07,
"loss": 0.6156,
"step": 6420
},
{
"epoch": 0.32,
"grad_norm": 43.3342399597168,
"learning_rate": 3.612121212121212e-07,
"loss": 0.697,
"step": 6430
},
{
"epoch": 0.32,
"grad_norm": 33.21229934692383,
"learning_rate": 3.602020202020202e-07,
"loss": 0.5572,
"step": 6440
},
{
"epoch": 0.32,
"grad_norm": 46.52666091918945,
"learning_rate": 3.5919191919191914e-07,
"loss": 0.6978,
"step": 6450
},
{
"epoch": 0.32,
"grad_norm": 7.359593391418457,
"learning_rate": 3.5818181818181814e-07,
"loss": 0.525,
"step": 6460
},
{
"epoch": 0.32,
"grad_norm": 47.78269958496094,
"learning_rate": 3.571717171717172e-07,
"loss": 0.5736,
"step": 6470
},
{
"epoch": 0.32,
"grad_norm": 50.07778549194336,
"learning_rate": 3.561616161616162e-07,
"loss": 1.1085,
"step": 6480
},
{
"epoch": 0.32,
"grad_norm": 15.350699424743652,
"learning_rate": 3.551515151515151e-07,
"loss": 0.6755,
"step": 6490
},
{
"epoch": 0.33,
"grad_norm": 15.840493202209473,
"learning_rate": 3.541414141414141e-07,
"loss": 0.4392,
"step": 6500
},
{
"epoch": 0.33,
"grad_norm": 55.45235061645508,
"learning_rate": 3.531313131313131e-07,
"loss": 0.5413,
"step": 6510
},
{
"epoch": 0.33,
"grad_norm": 23.71851921081543,
"learning_rate": 3.521212121212121e-07,
"loss": 0.4429,
"step": 6520
},
{
"epoch": 0.33,
"grad_norm": 17.28203773498535,
"learning_rate": 3.5111111111111106e-07,
"loss": 0.8087,
"step": 6530
},
{
"epoch": 0.33,
"grad_norm": 15.36353874206543,
"learning_rate": 3.5010101010101006e-07,
"loss": 0.3228,
"step": 6540
},
{
"epoch": 0.33,
"grad_norm": 1.450569748878479,
"learning_rate": 3.490909090909091e-07,
"loss": 0.4902,
"step": 6550
},
{
"epoch": 0.33,
"grad_norm": 0.08204346895217896,
"learning_rate": 3.480808080808081e-07,
"loss": 0.4472,
"step": 6560
},
{
"epoch": 0.33,
"grad_norm": 18.140419006347656,
"learning_rate": 3.4707070707070705e-07,
"loss": 0.4496,
"step": 6570
},
{
"epoch": 0.33,
"grad_norm": 33.478553771972656,
"learning_rate": 3.4606060606060605e-07,
"loss": 0.5778,
"step": 6580
},
{
"epoch": 0.33,
"grad_norm": 62.528663635253906,
"learning_rate": 3.4505050505050504e-07,
"loss": 0.6403,
"step": 6590
},
{
"epoch": 0.33,
"grad_norm": 58.19050216674805,
"learning_rate": 3.4404040404040404e-07,
"loss": 0.4827,
"step": 6600
},
{
"epoch": 0.33,
"grad_norm": 36.107032775878906,
"learning_rate": 3.43030303030303e-07,
"loss": 0.3823,
"step": 6610
},
{
"epoch": 0.33,
"grad_norm": 43.23326110839844,
"learning_rate": 3.42020202020202e-07,
"loss": 0.7228,
"step": 6620
},
{
"epoch": 0.33,
"grad_norm": 26.881813049316406,
"learning_rate": 3.41010101010101e-07,
"loss": 0.5037,
"step": 6630
},
{
"epoch": 0.33,
"grad_norm": 21.51839828491211,
"learning_rate": 3.4000000000000003e-07,
"loss": 0.4308,
"step": 6640
},
{
"epoch": 0.33,
"grad_norm": 13.050909996032715,
"learning_rate": 3.390909090909091e-07,
"loss": 0.6291,
"step": 6650
},
{
"epoch": 0.33,
"grad_norm": 14.444952964782715,
"learning_rate": 3.380808080808081e-07,
"loss": 0.2896,
"step": 6660
},
{
"epoch": 0.33,
"grad_norm": 0.09012435376644135,
"learning_rate": 3.3707070707070704e-07,
"loss": 0.6965,
"step": 6670
},
{
"epoch": 0.33,
"grad_norm": 4.841560363769531,
"learning_rate": 3.3606060606060604e-07,
"loss": 0.5084,
"step": 6680
},
{
"epoch": 0.33,
"grad_norm": 4.30256462097168,
"learning_rate": 3.3505050505050503e-07,
"loss": 0.4721,
"step": 6690
},
{
"epoch": 0.34,
"grad_norm": 21.05469512939453,
"learning_rate": 3.3404040404040403e-07,
"loss": 0.3786,
"step": 6700
},
{
"epoch": 0.34,
"grad_norm": 33.12663650512695,
"learning_rate": 3.3303030303030297e-07,
"loss": 0.4973,
"step": 6710
},
{
"epoch": 0.34,
"grad_norm": 13.384597778320312,
"learning_rate": 3.3202020202020197e-07,
"loss": 0.4733,
"step": 6720
},
{
"epoch": 0.34,
"grad_norm": 34.00277328491211,
"learning_rate": 3.31010101010101e-07,
"loss": 0.7499,
"step": 6730
},
{
"epoch": 0.34,
"grad_norm": 47.63022232055664,
"learning_rate": 3.3e-07,
"loss": 0.5514,
"step": 6740
},
{
"epoch": 0.34,
"grad_norm": 7.467356204986572,
"learning_rate": 3.2898989898989896e-07,
"loss": 0.781,
"step": 6750
},
{
"epoch": 0.34,
"grad_norm": 12.294841766357422,
"learning_rate": 3.2797979797979796e-07,
"loss": 0.4843,
"step": 6760
},
{
"epoch": 0.34,
"grad_norm": 61.1748046875,
"learning_rate": 3.2696969696969695e-07,
"loss": 0.4429,
"step": 6770
},
{
"epoch": 0.34,
"grad_norm": 2.0294747352600098,
"learning_rate": 3.2595959595959595e-07,
"loss": 0.5307,
"step": 6780
},
{
"epoch": 0.34,
"grad_norm": 18.16761589050293,
"learning_rate": 3.249494949494949e-07,
"loss": 0.8004,
"step": 6790
},
{
"epoch": 0.34,
"grad_norm": 28.33771324157715,
"learning_rate": 3.239393939393939e-07,
"loss": 0.6191,
"step": 6800
},
{
"epoch": 0.34,
"grad_norm": 21.200998306274414,
"learning_rate": 3.2292929292929294e-07,
"loss": 0.5335,
"step": 6810
},
{
"epoch": 0.34,
"grad_norm": 36.64286422729492,
"learning_rate": 3.2191919191919194e-07,
"loss": 0.7535,
"step": 6820
},
{
"epoch": 0.34,
"grad_norm": 39.46211242675781,
"learning_rate": 3.209090909090909e-07,
"loss": 0.3484,
"step": 6830
},
{
"epoch": 0.34,
"grad_norm": 24.213472366333008,
"learning_rate": 3.198989898989899e-07,
"loss": 0.4403,
"step": 6840
},
{
"epoch": 0.34,
"grad_norm": 37.36027526855469,
"learning_rate": 3.188888888888889e-07,
"loss": 0.5869,
"step": 6850
},
{
"epoch": 0.34,
"grad_norm": 25.690786361694336,
"learning_rate": 3.178787878787879e-07,
"loss": 0.3795,
"step": 6860
},
{
"epoch": 0.34,
"grad_norm": 39.07474899291992,
"learning_rate": 3.168686868686868e-07,
"loss": 0.8754,
"step": 6870
},
{
"epoch": 0.34,
"grad_norm": 8.781753540039062,
"learning_rate": 3.158585858585858e-07,
"loss": 0.7969,
"step": 6880
},
{
"epoch": 0.34,
"grad_norm": 21.158477783203125,
"learning_rate": 3.148484848484848e-07,
"loss": 0.4478,
"step": 6890
},
{
"epoch": 0.34,
"grad_norm": 38.86679458618164,
"learning_rate": 3.1383838383838386e-07,
"loss": 0.3951,
"step": 6900
},
{
"epoch": 0.35,
"grad_norm": 42.012882232666016,
"learning_rate": 3.128282828282828e-07,
"loss": 0.4561,
"step": 6910
},
{
"epoch": 0.35,
"grad_norm": 24.203311920166016,
"learning_rate": 3.118181818181818e-07,
"loss": 0.5513,
"step": 6920
},
{
"epoch": 0.35,
"grad_norm": 47.63656997680664,
"learning_rate": 3.108080808080808e-07,
"loss": 0.6389,
"step": 6930
},
{
"epoch": 0.35,
"grad_norm": 6.598226070404053,
"learning_rate": 3.097979797979798e-07,
"loss": 0.4206,
"step": 6940
},
{
"epoch": 0.35,
"grad_norm": 24.48682403564453,
"learning_rate": 3.0878787878787874e-07,
"loss": 0.8116,
"step": 6950
},
{
"epoch": 0.35,
"grad_norm": 67.7586898803711,
"learning_rate": 3.0777777777777774e-07,
"loss": 0.4623,
"step": 6960
},
{
"epoch": 0.35,
"grad_norm": 30.9173583984375,
"learning_rate": 3.0676767676767673e-07,
"loss": 0.5719,
"step": 6970
},
{
"epoch": 0.35,
"grad_norm": 20.1796817779541,
"learning_rate": 3.057575757575758e-07,
"loss": 0.4395,
"step": 6980
},
{
"epoch": 0.35,
"grad_norm": 52.20895004272461,
"learning_rate": 3.0474747474747473e-07,
"loss": 0.3088,
"step": 6990
},
{
"epoch": 0.35,
"grad_norm": 24.105398178100586,
"learning_rate": 3.037373737373737e-07,
"loss": 0.9203,
"step": 7000
},
{
"epoch": 0.35,
"eval_loss": 0.6140025854110718,
"eval_runtime": 272.9044,
"eval_samples_per_second": 3.664,
"eval_steps_per_second": 3.664,
"step": 7000
},
{
"epoch": 0.35,
"grad_norm": 43.471683502197266,
"learning_rate": 3.027272727272727e-07,
"loss": 0.4468,
"step": 7010
},
{
"epoch": 0.35,
"grad_norm": 15.628482818603516,
"learning_rate": 3.017171717171717e-07,
"loss": 0.5313,
"step": 7020
},
{
"epoch": 0.35,
"grad_norm": 19.517107009887695,
"learning_rate": 3.0070707070707066e-07,
"loss": 0.6564,
"step": 7030
},
{
"epoch": 0.35,
"grad_norm": 20.48263931274414,
"learning_rate": 2.9969696969696966e-07,
"loss": 0.6433,
"step": 7040
},
{
"epoch": 0.35,
"grad_norm": 87.30323791503906,
"learning_rate": 2.9868686868686866e-07,
"loss": 0.4559,
"step": 7050
},
{
"epoch": 0.35,
"grad_norm": 23.63374900817871,
"learning_rate": 2.9767676767676765e-07,
"loss": 0.3959,
"step": 7060
},
{
"epoch": 0.35,
"grad_norm": 43.03889465332031,
"learning_rate": 2.966666666666667e-07,
"loss": 0.4506,
"step": 7070
},
{
"epoch": 0.35,
"grad_norm": 24.537097930908203,
"learning_rate": 2.9565656565656565e-07,
"loss": 0.6453,
"step": 7080
},
{
"epoch": 0.35,
"grad_norm": 34.157012939453125,
"learning_rate": 2.9464646464646464e-07,
"loss": 0.6124,
"step": 7090
},
{
"epoch": 0.35,
"grad_norm": 42.49196243286133,
"learning_rate": 2.9363636363636364e-07,
"loss": 0.5085,
"step": 7100
},
{
"epoch": 0.36,
"grad_norm": 69.6786880493164,
"learning_rate": 2.926262626262626e-07,
"loss": 0.7614,
"step": 7110
},
{
"epoch": 0.36,
"grad_norm": 50.114356994628906,
"learning_rate": 2.916161616161616e-07,
"loss": 0.5012,
"step": 7120
},
{
"epoch": 0.36,
"grad_norm": 21.865543365478516,
"learning_rate": 2.906060606060606e-07,
"loss": 0.4063,
"step": 7130
},
{
"epoch": 0.36,
"grad_norm": 108.2043685913086,
"learning_rate": 2.895959595959596e-07,
"loss": 0.546,
"step": 7140
},
{
"epoch": 0.36,
"grad_norm": 15.259824752807617,
"learning_rate": 2.885858585858586e-07,
"loss": 0.758,
"step": 7150
},
{
"epoch": 0.36,
"grad_norm": 2.9228439331054688,
"learning_rate": 2.8757575757575757e-07,
"loss": 0.6237,
"step": 7160
},
{
"epoch": 0.36,
"grad_norm": 46.34029769897461,
"learning_rate": 2.8656565656565657e-07,
"loss": 0.5118,
"step": 7170
},
{
"epoch": 0.36,
"grad_norm": 23.95012092590332,
"learning_rate": 2.8555555555555556e-07,
"loss": 0.5845,
"step": 7180
},
{
"epoch": 0.36,
"grad_norm": 15.668420791625977,
"learning_rate": 2.845454545454545e-07,
"loss": 0.3774,
"step": 7190
},
{
"epoch": 0.36,
"grad_norm": 9.104521751403809,
"learning_rate": 2.835353535353535e-07,
"loss": 0.5385,
"step": 7200
},
{
"epoch": 0.36,
"grad_norm": 41.04270553588867,
"learning_rate": 2.825252525252525e-07,
"loss": 0.5438,
"step": 7210
},
{
"epoch": 0.36,
"grad_norm": 39.3018798828125,
"learning_rate": 2.8161616161616157e-07,
"loss": 0.812,
"step": 7220
},
{
"epoch": 0.36,
"grad_norm": 47.075321197509766,
"learning_rate": 2.8060606060606057e-07,
"loss": 0.3639,
"step": 7230
},
{
"epoch": 0.36,
"grad_norm": 34.5875358581543,
"learning_rate": 2.795959595959596e-07,
"loss": 0.6037,
"step": 7240
},
{
"epoch": 0.36,
"grad_norm": 42.29884719848633,
"learning_rate": 2.785858585858586e-07,
"loss": 0.4921,
"step": 7250
},
{
"epoch": 0.36,
"grad_norm": 57.187618255615234,
"learning_rate": 2.7757575757575756e-07,
"loss": 0.7437,
"step": 7260
},
{
"epoch": 0.36,
"grad_norm": 108.54759979248047,
"learning_rate": 2.7656565656565656e-07,
"loss": 0.4292,
"step": 7270
},
{
"epoch": 0.36,
"grad_norm": 45.5640869140625,
"learning_rate": 2.7555555555555555e-07,
"loss": 0.5665,
"step": 7280
},
{
"epoch": 0.36,
"grad_norm": 25.874502182006836,
"learning_rate": 2.7454545454545455e-07,
"loss": 0.5036,
"step": 7290
},
{
"epoch": 0.36,
"grad_norm": 110.4467544555664,
"learning_rate": 2.735353535353535e-07,
"loss": 0.6101,
"step": 7300
},
{
"epoch": 0.37,
"grad_norm": 11.48626708984375,
"learning_rate": 2.725252525252525e-07,
"loss": 0.5754,
"step": 7310
},
{
"epoch": 0.37,
"grad_norm": 19.138206481933594,
"learning_rate": 2.7151515151515154e-07,
"loss": 0.433,
"step": 7320
},
{
"epoch": 0.37,
"grad_norm": 13.751534461975098,
"learning_rate": 2.7050505050505054e-07,
"loss": 0.5466,
"step": 7330
},
{
"epoch": 0.37,
"grad_norm": 43.167144775390625,
"learning_rate": 2.694949494949495e-07,
"loss": 0.5771,
"step": 7340
},
{
"epoch": 0.37,
"grad_norm": 90.9304428100586,
"learning_rate": 2.684848484848485e-07,
"loss": 0.4856,
"step": 7350
},
{
"epoch": 0.37,
"grad_norm": 63.778724670410156,
"learning_rate": 2.674747474747475e-07,
"loss": 0.4655,
"step": 7360
},
{
"epoch": 0.37,
"grad_norm": 29.103656768798828,
"learning_rate": 2.6646464646464647e-07,
"loss": 0.4976,
"step": 7370
},
{
"epoch": 0.37,
"grad_norm": 63.47914123535156,
"learning_rate": 2.654545454545454e-07,
"loss": 0.5243,
"step": 7380
},
{
"epoch": 0.37,
"grad_norm": 29.538002014160156,
"learning_rate": 2.644444444444444e-07,
"loss": 0.5243,
"step": 7390
},
{
"epoch": 0.37,
"grad_norm": 49.40266418457031,
"learning_rate": 2.634343434343434e-07,
"loss": 0.5618,
"step": 7400
},
{
"epoch": 0.37,
"grad_norm": 25.336641311645508,
"learning_rate": 2.6242424242424246e-07,
"loss": 0.4259,
"step": 7410
},
{
"epoch": 0.37,
"grad_norm": 4.6554460525512695,
"learning_rate": 2.614141414141414e-07,
"loss": 0.5528,
"step": 7420
},
{
"epoch": 0.37,
"grad_norm": 28.029766082763672,
"learning_rate": 2.604040404040404e-07,
"loss": 0.6828,
"step": 7430
},
{
"epoch": 0.37,
"grad_norm": 17.596668243408203,
"learning_rate": 2.593939393939394e-07,
"loss": 0.6264,
"step": 7440
},
{
"epoch": 0.37,
"grad_norm": 34.13500213623047,
"learning_rate": 2.583838383838384e-07,
"loss": 0.5018,
"step": 7450
},
{
"epoch": 0.37,
"grad_norm": 74.13536071777344,
"learning_rate": 2.5737373737373734e-07,
"loss": 0.7298,
"step": 7460
},
{
"epoch": 0.37,
"grad_norm": 63.911190032958984,
"learning_rate": 2.5636363636363633e-07,
"loss": 0.5667,
"step": 7470
},
{
"epoch": 0.37,
"grad_norm": 25.39292335510254,
"learning_rate": 2.5535353535353533e-07,
"loss": 0.496,
"step": 7480
},
{
"epoch": 0.37,
"grad_norm": 42.927764892578125,
"learning_rate": 2.5434343434343433e-07,
"loss": 0.4305,
"step": 7490
},
{
"epoch": 0.38,
"grad_norm": 26.835189819335938,
"learning_rate": 2.533333333333333e-07,
"loss": 0.5412,
"step": 7500
},
{
"epoch": 0.38,
"grad_norm": 14.459028244018555,
"learning_rate": 2.523232323232323e-07,
"loss": 0.4731,
"step": 7510
},
{
"epoch": 0.38,
"grad_norm": 41.71003341674805,
"learning_rate": 2.513131313131313e-07,
"loss": 0.3808,
"step": 7520
},
{
"epoch": 0.38,
"grad_norm": 5.8523268699646,
"learning_rate": 2.503030303030303e-07,
"loss": 0.5166,
"step": 7530
},
{
"epoch": 0.38,
"grad_norm": 11.060973167419434,
"learning_rate": 2.4929292929292926e-07,
"loss": 0.4462,
"step": 7540
},
{
"epoch": 0.38,
"grad_norm": 16.44357681274414,
"learning_rate": 2.4828282828282826e-07,
"loss": 0.5186,
"step": 7550
},
{
"epoch": 0.38,
"grad_norm": 20.26371192932129,
"learning_rate": 2.4727272727272725e-07,
"loss": 0.4988,
"step": 7560
},
{
"epoch": 0.38,
"grad_norm": 9.472707748413086,
"learning_rate": 2.4626262626262625e-07,
"loss": 0.4884,
"step": 7570
},
{
"epoch": 0.38,
"grad_norm": 26.50881004333496,
"learning_rate": 2.4525252525252525e-07,
"loss": 0.4872,
"step": 7580
},
{
"epoch": 0.38,
"grad_norm": 0.0010806540958583355,
"learning_rate": 2.4424242424242424e-07,
"loss": 0.496,
"step": 7590
},
{
"epoch": 0.38,
"grad_norm": 6.305263996124268,
"learning_rate": 2.4323232323232324e-07,
"loss": 0.3558,
"step": 7600
},
{
"epoch": 0.38,
"grad_norm": 39.25360870361328,
"learning_rate": 2.4222222222222224e-07,
"loss": 0.4699,
"step": 7610
},
{
"epoch": 0.38,
"grad_norm": 11.405289649963379,
"learning_rate": 2.412121212121212e-07,
"loss": 0.436,
"step": 7620
},
{
"epoch": 0.38,
"grad_norm": 29.754093170166016,
"learning_rate": 2.402020202020202e-07,
"loss": 0.4108,
"step": 7630
},
{
"epoch": 0.38,
"grad_norm": 12.420821189880371,
"learning_rate": 2.391919191919192e-07,
"loss": 0.4973,
"step": 7640
},
{
"epoch": 0.38,
"grad_norm": 39.9239387512207,
"learning_rate": 2.3818181818181817e-07,
"loss": 0.6269,
"step": 7650
},
{
"epoch": 0.38,
"grad_norm": 20.858978271484375,
"learning_rate": 2.3717171717171717e-07,
"loss": 0.6881,
"step": 7660
},
{
"epoch": 0.38,
"grad_norm": 22.475914001464844,
"learning_rate": 2.3616161616161614e-07,
"loss": 0.7368,
"step": 7670
},
{
"epoch": 0.38,
"grad_norm": 73.21558380126953,
"learning_rate": 2.3515151515151514e-07,
"loss": 0.4758,
"step": 7680
},
{
"epoch": 0.38,
"grad_norm": 44.87932205200195,
"learning_rate": 2.3414141414141413e-07,
"loss": 0.6987,
"step": 7690
},
{
"epoch": 0.39,
"grad_norm": 29.088468551635742,
"learning_rate": 2.3313131313131313e-07,
"loss": 0.5054,
"step": 7700
},
{
"epoch": 0.39,
"grad_norm": 11.990555763244629,
"learning_rate": 2.321212121212121e-07,
"loss": 0.9092,
"step": 7710
},
{
"epoch": 0.39,
"grad_norm": 8.802298545837402,
"learning_rate": 2.311111111111111e-07,
"loss": 0.6124,
"step": 7720
},
{
"epoch": 0.39,
"grad_norm": 21.477567672729492,
"learning_rate": 2.301010101010101e-07,
"loss": 0.4771,
"step": 7730
},
{
"epoch": 0.39,
"grad_norm": 31.16932487487793,
"learning_rate": 2.290909090909091e-07,
"loss": 0.6304,
"step": 7740
},
{
"epoch": 0.39,
"grad_norm": 24.214622497558594,
"learning_rate": 2.2808080808080806e-07,
"loss": 0.644,
"step": 7750
},
{
"epoch": 0.39,
"grad_norm": 12.653794288635254,
"learning_rate": 2.2707070707070706e-07,
"loss": 0.5335,
"step": 7760
},
{
"epoch": 0.39,
"grad_norm": 38.705955505371094,
"learning_rate": 2.2606060606060606e-07,
"loss": 0.8003,
"step": 7770
},
{
"epoch": 0.39,
"grad_norm": 21.63491439819336,
"learning_rate": 2.2505050505050505e-07,
"loss": 0.449,
"step": 7780
},
{
"epoch": 0.39,
"grad_norm": 29.634883880615234,
"learning_rate": 2.2404040404040402e-07,
"loss": 0.741,
"step": 7790
},
{
"epoch": 0.39,
"grad_norm": 37.86460876464844,
"learning_rate": 2.2303030303030302e-07,
"loss": 0.6848,
"step": 7800
},
{
"epoch": 0.39,
"grad_norm": 14.158483505249023,
"learning_rate": 2.22020202020202e-07,
"loss": 0.61,
"step": 7810
},
{
"epoch": 0.39,
"grad_norm": 12.76289176940918,
"learning_rate": 2.2101010101010101e-07,
"loss": 0.3045,
"step": 7820
},
{
"epoch": 0.39,
"grad_norm": 84.57695770263672,
"learning_rate": 2.1999999999999998e-07,
"loss": 0.5536,
"step": 7830
},
{
"epoch": 0.39,
"grad_norm": 15.674383163452148,
"learning_rate": 2.1898989898989898e-07,
"loss": 0.4987,
"step": 7840
},
{
"epoch": 0.39,
"grad_norm": 21.316478729248047,
"learning_rate": 2.1797979797979795e-07,
"loss": 0.4829,
"step": 7850
},
{
"epoch": 0.39,
"grad_norm": 0.0016334312967956066,
"learning_rate": 2.1696969696969698e-07,
"loss": 0.6145,
"step": 7860
},
{
"epoch": 0.39,
"grad_norm": 0.021676035597920418,
"learning_rate": 2.1595959595959595e-07,
"loss": 0.3605,
"step": 7870
},
{
"epoch": 0.39,
"grad_norm": 49.891990661621094,
"learning_rate": 2.1494949494949494e-07,
"loss": 0.6608,
"step": 7880
},
{
"epoch": 0.39,
"grad_norm": 30.2360897064209,
"learning_rate": 2.139393939393939e-07,
"loss": 0.4205,
"step": 7890
},
{
"epoch": 0.4,
"grad_norm": 0.003311208914965391,
"learning_rate": 2.1292929292929294e-07,
"loss": 0.4262,
"step": 7900
},
{
"epoch": 0.4,
"grad_norm": 14.92242431640625,
"learning_rate": 2.119191919191919e-07,
"loss": 0.4505,
"step": 7910
},
{
"epoch": 0.4,
"grad_norm": 24.65119171142578,
"learning_rate": 2.109090909090909e-07,
"loss": 0.5488,
"step": 7920
},
{
"epoch": 0.4,
"grad_norm": 36.82573318481445,
"learning_rate": 2.0989898989898987e-07,
"loss": 0.434,
"step": 7930
},
{
"epoch": 0.4,
"grad_norm": 53.093685150146484,
"learning_rate": 2.088888888888889e-07,
"loss": 0.6812,
"step": 7940
},
{
"epoch": 0.4,
"grad_norm": 0.0008963773143477738,
"learning_rate": 2.0787878787878787e-07,
"loss": 0.6032,
"step": 7950
},
{
"epoch": 0.4,
"grad_norm": 23.73720932006836,
"learning_rate": 2.0686868686868686e-07,
"loss": 0.4269,
"step": 7960
},
{
"epoch": 0.4,
"grad_norm": 2.7195699214935303,
"learning_rate": 2.0585858585858584e-07,
"loss": 0.3908,
"step": 7970
},
{
"epoch": 0.4,
"grad_norm": 7.3523173332214355,
"learning_rate": 2.0484848484848483e-07,
"loss": 0.555,
"step": 7980
},
{
"epoch": 0.4,
"grad_norm": 54.250423431396484,
"learning_rate": 2.0383838383838383e-07,
"loss": 0.5805,
"step": 7990
},
{
"epoch": 0.4,
"grad_norm": 22.591411590576172,
"learning_rate": 2.0282828282828283e-07,
"loss": 0.5066,
"step": 8000
},
{
"epoch": 0.4,
"eval_loss": 0.52889084815979,
"eval_runtime": 274.4046,
"eval_samples_per_second": 3.644,
"eval_steps_per_second": 3.644,
"step": 8000
}
],
"logging_steps": 10,
"max_steps": 10000,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 2000,
"total_flos": 3.77128394686464e+17,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}