{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 1.0,
"eval_steps": 0,
"global_step": 495,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.00202020202020202,
"grad_norm": 0.466796875,
"learning_rate": 9.97979797979798e-06,
"loss": 1.793,
"step": 1
},
{
"epoch": 0.00404040404040404,
"grad_norm": 0.4453125,
"learning_rate": 9.95959595959596e-06,
"loss": 1.8337,
"step": 2
},
{
"epoch": 0.006060606060606061,
"grad_norm": 0.4296875,
"learning_rate": 9.939393939393939e-06,
"loss": 1.829,
"step": 3
},
{
"epoch": 0.00808080808080808,
"grad_norm": 0.43359375,
"learning_rate": 9.91919191919192e-06,
"loss": 1.7952,
"step": 4
},
{
"epoch": 0.010101010101010102,
"grad_norm": 0.412109375,
"learning_rate": 9.8989898989899e-06,
"loss": 1.8804,
"step": 5
},
{
"epoch": 0.012121212121212121,
"grad_norm": 0.5390625,
"learning_rate": 9.87878787878788e-06,
"loss": 1.8649,
"step": 6
},
{
"epoch": 0.014141414141414142,
"grad_norm": 0.365234375,
"learning_rate": 9.85858585858586e-06,
"loss": 1.8901,
"step": 7
},
{
"epoch": 0.01616161616161616,
"grad_norm": 0.33203125,
"learning_rate": 9.838383838383839e-06,
"loss": 1.7955,
"step": 8
},
{
"epoch": 0.01818181818181818,
"grad_norm": 0.32421875,
"learning_rate": 9.81818181818182e-06,
"loss": 1.8151,
"step": 9
},
{
"epoch": 0.020202020202020204,
"grad_norm": 0.310546875,
"learning_rate": 9.797979797979798e-06,
"loss": 1.7557,
"step": 10
},
{
"epoch": 0.022222222222222223,
"grad_norm": 0.31640625,
"learning_rate": 9.777777777777779e-06,
"loss": 1.7174,
"step": 11
},
{
"epoch": 0.024242424242424242,
"grad_norm": 0.287109375,
"learning_rate": 9.757575757575758e-06,
"loss": 1.7118,
"step": 12
},
{
"epoch": 0.026262626262626262,
"grad_norm": 0.279296875,
"learning_rate": 9.737373737373738e-06,
"loss": 1.5918,
"step": 13
},
{
"epoch": 0.028282828282828285,
"grad_norm": 0.302734375,
"learning_rate": 9.717171717171719e-06,
"loss": 1.6372,
"step": 14
},
{
"epoch": 0.030303030303030304,
"grad_norm": 0.283203125,
"learning_rate": 9.696969696969698e-06,
"loss": 1.6647,
"step": 15
},
{
"epoch": 0.03232323232323232,
"grad_norm": 0.296875,
"learning_rate": 9.676767676767678e-06,
"loss": 1.6657,
"step": 16
},
{
"epoch": 0.03434343434343434,
"grad_norm": 0.283203125,
"learning_rate": 9.656565656565657e-06,
"loss": 1.4841,
"step": 17
},
{
"epoch": 0.03636363636363636,
"grad_norm": 0.2890625,
"learning_rate": 9.636363636363638e-06,
"loss": 1.5899,
"step": 18
},
{
"epoch": 0.03838383838383838,
"grad_norm": 0.29296875,
"learning_rate": 9.616161616161616e-06,
"loss": 1.6603,
"step": 19
},
{
"epoch": 0.04040404040404041,
"grad_norm": 0.275390625,
"learning_rate": 9.595959595959597e-06,
"loss": 1.5316,
"step": 20
},
{
"epoch": 0.04242424242424243,
"grad_norm": 0.30859375,
"learning_rate": 9.575757575757576e-06,
"loss": 1.6512,
"step": 21
},
{
"epoch": 0.044444444444444446,
"grad_norm": 0.25390625,
"learning_rate": 9.555555555555556e-06,
"loss": 1.499,
"step": 22
},
{
"epoch": 0.046464646464646465,
"grad_norm": 0.263671875,
"learning_rate": 9.535353535353537e-06,
"loss": 1.5049,
"step": 23
},
{
"epoch": 0.048484848484848485,
"grad_norm": 0.25,
"learning_rate": 9.515151515151516e-06,
"loss": 1.5328,
"step": 24
},
{
"epoch": 0.050505050505050504,
"grad_norm": 0.240234375,
"learning_rate": 9.494949494949497e-06,
"loss": 1.5673,
"step": 25
},
{
"epoch": 0.052525252525252523,
"grad_norm": 0.259765625,
"learning_rate": 9.474747474747475e-06,
"loss": 1.5331,
"step": 26
},
{
"epoch": 0.05454545454545454,
"grad_norm": 0.2373046875,
"learning_rate": 9.454545454545456e-06,
"loss": 1.5281,
"step": 27
},
{
"epoch": 0.05656565656565657,
"grad_norm": 0.220703125,
"learning_rate": 9.434343434343435e-06,
"loss": 1.4541,
"step": 28
},
{
"epoch": 0.05858585858585859,
"grad_norm": 0.2431640625,
"learning_rate": 9.414141414141414e-06,
"loss": 1.5046,
"step": 29
},
{
"epoch": 0.06060606060606061,
"grad_norm": 0.234375,
"learning_rate": 9.393939393939396e-06,
"loss": 1.4657,
"step": 30
},
{
"epoch": 0.06262626262626263,
"grad_norm": 0.2275390625,
"learning_rate": 9.373737373737375e-06,
"loss": 1.3571,
"step": 31
},
{
"epoch": 0.06464646464646465,
"grad_norm": 0.296875,
"learning_rate": 9.353535353535354e-06,
"loss": 1.3992,
"step": 32
},
{
"epoch": 0.06666666666666667,
"grad_norm": 0.287109375,
"learning_rate": 9.333333333333334e-06,
"loss": 1.4552,
"step": 33
},
{
"epoch": 0.06868686868686869,
"grad_norm": 0.216796875,
"learning_rate": 9.313131313131313e-06,
"loss": 1.4571,
"step": 34
},
{
"epoch": 0.0707070707070707,
"grad_norm": 0.2265625,
"learning_rate": 9.292929292929294e-06,
"loss": 1.4078,
"step": 35
},
{
"epoch": 0.07272727272727272,
"grad_norm": 0.279296875,
"learning_rate": 9.272727272727273e-06,
"loss": 1.3755,
"step": 36
},
{
"epoch": 0.07474747474747474,
"grad_norm": 0.2021484375,
"learning_rate": 9.252525252525253e-06,
"loss": 1.3593,
"step": 37
},
{
"epoch": 0.07676767676767676,
"grad_norm": 0.1923828125,
"learning_rate": 9.232323232323232e-06,
"loss": 1.2517,
"step": 38
},
{
"epoch": 0.07878787878787878,
"grad_norm": 0.33984375,
"learning_rate": 9.212121212121213e-06,
"loss": 1.3804,
"step": 39
},
{
"epoch": 0.08080808080808081,
"grad_norm": 0.2001953125,
"learning_rate": 9.191919191919193e-06,
"loss": 1.3428,
"step": 40
},
{
"epoch": 0.08282828282828283,
"grad_norm": 0.2109375,
"learning_rate": 9.171717171717172e-06,
"loss": 1.3576,
"step": 41
},
{
"epoch": 0.08484848484848485,
"grad_norm": 0.197265625,
"learning_rate": 9.151515151515153e-06,
"loss": 1.3772,
"step": 42
},
{
"epoch": 0.08686868686868687,
"grad_norm": 0.1826171875,
"learning_rate": 9.131313131313132e-06,
"loss": 1.2995,
"step": 43
},
{
"epoch": 0.08888888888888889,
"grad_norm": 0.185546875,
"learning_rate": 9.111111111111112e-06,
"loss": 1.3324,
"step": 44
},
{
"epoch": 0.09090909090909091,
"grad_norm": 0.1953125,
"learning_rate": 9.090909090909091e-06,
"loss": 1.324,
"step": 45
},
{
"epoch": 0.09292929292929293,
"grad_norm": 0.1923828125,
"learning_rate": 9.070707070707072e-06,
"loss": 1.3671,
"step": 46
},
{
"epoch": 0.09494949494949495,
"grad_norm": 0.1826171875,
"learning_rate": 9.050505050505052e-06,
"loss": 1.3559,
"step": 47
},
{
"epoch": 0.09696969696969697,
"grad_norm": 0.1728515625,
"learning_rate": 9.030303030303031e-06,
"loss": 1.3197,
"step": 48
},
{
"epoch": 0.09898989898989899,
"grad_norm": 0.171875,
"learning_rate": 9.010101010101012e-06,
"loss": 1.2988,
"step": 49
},
{
"epoch": 0.10101010101010101,
"grad_norm": 0.35546875,
"learning_rate": 8.98989898989899e-06,
"loss": 1.29,
"step": 50
},
{
"epoch": 0.10303030303030303,
"grad_norm": 0.1787109375,
"learning_rate": 8.969696969696971e-06,
"loss": 1.2839,
"step": 51
},
{
"epoch": 0.10505050505050505,
"grad_norm": 0.1650390625,
"learning_rate": 8.94949494949495e-06,
"loss": 1.244,
"step": 52
},
{
"epoch": 0.10707070707070707,
"grad_norm": 0.1689453125,
"learning_rate": 8.92929292929293e-06,
"loss": 1.2877,
"step": 53
},
{
"epoch": 0.10909090909090909,
"grad_norm": 0.1787109375,
"learning_rate": 8.90909090909091e-06,
"loss": 1.269,
"step": 54
},
{
"epoch": 0.1111111111111111,
"grad_norm": 0.171875,
"learning_rate": 8.888888888888888e-06,
"loss": 1.3108,
"step": 55
},
{
"epoch": 0.11313131313131314,
"grad_norm": 0.16796875,
"learning_rate": 8.86868686868687e-06,
"loss": 1.3129,
"step": 56
},
{
"epoch": 0.11515151515151516,
"grad_norm": 0.1728515625,
"learning_rate": 8.84848484848485e-06,
"loss": 1.1976,
"step": 57
},
{
"epoch": 0.11717171717171718,
"grad_norm": 0.169921875,
"learning_rate": 8.82828282828283e-06,
"loss": 1.283,
"step": 58
},
{
"epoch": 0.1191919191919192,
"grad_norm": 0.15625,
"learning_rate": 8.808080808080809e-06,
"loss": 1.2786,
"step": 59
},
{
"epoch": 0.12121212121212122,
"grad_norm": 0.197265625,
"learning_rate": 8.787878787878788e-06,
"loss": 1.2718,
"step": 60
},
{
"epoch": 0.12323232323232323,
"grad_norm": 0.181640625,
"learning_rate": 8.767676767676768e-06,
"loss": 1.3529,
"step": 61
},
{
"epoch": 0.12525252525252525,
"grad_norm": 0.16015625,
"learning_rate": 8.747474747474747e-06,
"loss": 1.1866,
"step": 62
},
{
"epoch": 0.12727272727272726,
"grad_norm": 0.1630859375,
"learning_rate": 8.727272727272728e-06,
"loss": 1.2852,
"step": 63
},
{
"epoch": 0.1292929292929293,
"grad_norm": 0.171875,
"learning_rate": 8.707070707070707e-06,
"loss": 1.2611,
"step": 64
},
{
"epoch": 0.13131313131313133,
"grad_norm": 0.2451171875,
"learning_rate": 8.686868686868687e-06,
"loss": 1.1993,
"step": 65
},
{
"epoch": 0.13333333333333333,
"grad_norm": 0.1572265625,
"learning_rate": 8.666666666666668e-06,
"loss": 1.2068,
"step": 66
},
{
"epoch": 0.13535353535353536,
"grad_norm": 0.1611328125,
"learning_rate": 8.646464646464647e-06,
"loss": 1.2273,
"step": 67
},
{
"epoch": 0.13737373737373737,
"grad_norm": 0.17578125,
"learning_rate": 8.626262626262627e-06,
"loss": 1.1792,
"step": 68
},
{
"epoch": 0.1393939393939394,
"grad_norm": 0.177734375,
"learning_rate": 8.606060606060606e-06,
"loss": 1.2232,
"step": 69
},
{
"epoch": 0.1414141414141414,
"grad_norm": 0.1748046875,
"learning_rate": 8.585858585858587e-06,
"loss": 1.1552,
"step": 70
},
{
"epoch": 0.14343434343434344,
"grad_norm": 0.1533203125,
"learning_rate": 8.565656565656566e-06,
"loss": 1.1916,
"step": 71
},
{
"epoch": 0.14545454545454545,
"grad_norm": 0.1552734375,
"learning_rate": 8.545454545454546e-06,
"loss": 1.1712,
"step": 72
},
{
"epoch": 0.14747474747474748,
"grad_norm": 0.162109375,
"learning_rate": 8.525252525252527e-06,
"loss": 1.2027,
"step": 73
},
{
"epoch": 0.1494949494949495,
"grad_norm": 0.1787109375,
"learning_rate": 8.505050505050506e-06,
"loss": 1.2892,
"step": 74
},
{
"epoch": 0.15151515151515152,
"grad_norm": 0.150390625,
"learning_rate": 8.484848484848486e-06,
"loss": 1.1894,
"step": 75
},
{
"epoch": 0.15353535353535352,
"grad_norm": 0.158203125,
"learning_rate": 8.464646464646465e-06,
"loss": 1.1829,
"step": 76
},
{
"epoch": 0.15555555555555556,
"grad_norm": 0.1455078125,
"learning_rate": 8.444444444444446e-06,
"loss": 1.1127,
"step": 77
},
{
"epoch": 0.15757575757575756,
"grad_norm": 0.1533203125,
"learning_rate": 8.424242424242425e-06,
"loss": 1.2093,
"step": 78
},
{
"epoch": 0.1595959595959596,
"grad_norm": 0.150390625,
"learning_rate": 8.404040404040405e-06,
"loss": 1.1844,
"step": 79
},
{
"epoch": 0.16161616161616163,
"grad_norm": 0.1650390625,
"learning_rate": 8.383838383838384e-06,
"loss": 1.175,
"step": 80
},
{
"epoch": 0.16363636363636364,
"grad_norm": 0.1962890625,
"learning_rate": 8.363636363636365e-06,
"loss": 1.1569,
"step": 81
},
{
"epoch": 0.16565656565656567,
"grad_norm": 0.197265625,
"learning_rate": 8.343434343434345e-06,
"loss": 1.2603,
"step": 82
},
{
"epoch": 0.16767676767676767,
"grad_norm": 0.1484375,
"learning_rate": 8.323232323232324e-06,
"loss": 1.1733,
"step": 83
},
{
"epoch": 0.1696969696969697,
"grad_norm": 0.1591796875,
"learning_rate": 8.303030303030305e-06,
"loss": 1.2082,
"step": 84
},
{
"epoch": 0.1717171717171717,
"grad_norm": 0.1982421875,
"learning_rate": 8.282828282828283e-06,
"loss": 1.1991,
"step": 85
},
{
"epoch": 0.17373737373737375,
"grad_norm": 0.15234375,
"learning_rate": 8.262626262626264e-06,
"loss": 1.1648,
"step": 86
},
{
"epoch": 0.17575757575757575,
"grad_norm": 0.2216796875,
"learning_rate": 8.242424242424243e-06,
"loss": 1.1292,
"step": 87
},
{
"epoch": 0.17777777777777778,
"grad_norm": 0.1552734375,
"learning_rate": 8.222222222222222e-06,
"loss": 1.1936,
"step": 88
},
{
"epoch": 0.1797979797979798,
"grad_norm": 0.1728515625,
"learning_rate": 8.202020202020202e-06,
"loss": 1.1531,
"step": 89
},
{
"epoch": 0.18181818181818182,
"grad_norm": 0.1513671875,
"learning_rate": 8.181818181818183e-06,
"loss": 1.1727,
"step": 90
},
{
"epoch": 0.18383838383838383,
"grad_norm": 0.162109375,
"learning_rate": 8.161616161616162e-06,
"loss": 1.1873,
"step": 91
},
{
"epoch": 0.18585858585858586,
"grad_norm": 0.1953125,
"learning_rate": 8.141414141414142e-06,
"loss": 1.1735,
"step": 92
},
{
"epoch": 0.18787878787878787,
"grad_norm": 0.1923828125,
"learning_rate": 8.121212121212121e-06,
"loss": 1.1552,
"step": 93
},
{
"epoch": 0.1898989898989899,
"grad_norm": 0.1669921875,
"learning_rate": 8.101010101010102e-06,
"loss": 1.1441,
"step": 94
},
{
"epoch": 0.1919191919191919,
"grad_norm": 0.1728515625,
"learning_rate": 8.08080808080808e-06,
"loss": 1.164,
"step": 95
},
{
"epoch": 0.19393939393939394,
"grad_norm": 0.1552734375,
"learning_rate": 8.060606060606061e-06,
"loss": 1.1634,
"step": 96
},
{
"epoch": 0.19595959595959597,
"grad_norm": 0.224609375,
"learning_rate": 8.04040404040404e-06,
"loss": 1.1828,
"step": 97
},
{
"epoch": 0.19797979797979798,
"grad_norm": 0.181640625,
"learning_rate": 8.02020202020202e-06,
"loss": 1.1501,
"step": 98
},
{
"epoch": 0.2,
"grad_norm": 0.166015625,
"learning_rate": 8.000000000000001e-06,
"loss": 1.1776,
"step": 99
},
{
"epoch": 0.20202020202020202,
"grad_norm": 0.1533203125,
"learning_rate": 7.97979797979798e-06,
"loss": 1.1401,
"step": 100
},
{
"epoch": 0.20404040404040405,
"grad_norm": 0.15234375,
"learning_rate": 7.95959595959596e-06,
"loss": 1.1554,
"step": 101
},
{
"epoch": 0.20606060606060606,
"grad_norm": 0.166015625,
"learning_rate": 7.93939393939394e-06,
"loss": 1.2024,
"step": 102
},
{
"epoch": 0.2080808080808081,
"grad_norm": 0.154296875,
"learning_rate": 7.91919191919192e-06,
"loss": 1.141,
"step": 103
},
{
"epoch": 0.2101010101010101,
"grad_norm": 0.158203125,
"learning_rate": 7.898989898989899e-06,
"loss": 1.1776,
"step": 104
},
{
"epoch": 0.21212121212121213,
"grad_norm": 0.1640625,
"learning_rate": 7.87878787878788e-06,
"loss": 1.1885,
"step": 105
},
{
"epoch": 0.21414141414141413,
"grad_norm": 0.201171875,
"learning_rate": 7.858585858585859e-06,
"loss": 1.1369,
"step": 106
},
{
"epoch": 0.21616161616161617,
"grad_norm": 0.162109375,
"learning_rate": 7.838383838383839e-06,
"loss": 1.1101,
"step": 107
},
{
"epoch": 0.21818181818181817,
"grad_norm": 0.185546875,
"learning_rate": 7.81818181818182e-06,
"loss": 1.1358,
"step": 108
},
{
"epoch": 0.2202020202020202,
"grad_norm": 0.1689453125,
"learning_rate": 7.797979797979799e-06,
"loss": 1.1267,
"step": 109
},
{
"epoch": 0.2222222222222222,
"grad_norm": 0.1689453125,
"learning_rate": 7.77777777777778e-06,
"loss": 1.1339,
"step": 110
},
{
"epoch": 0.22424242424242424,
"grad_norm": 0.19140625,
"learning_rate": 7.757575757575758e-06,
"loss": 1.0777,
"step": 111
},
{
"epoch": 0.22626262626262628,
"grad_norm": 0.16015625,
"learning_rate": 7.737373737373739e-06,
"loss": 1.1315,
"step": 112
},
{
"epoch": 0.22828282828282828,
"grad_norm": 0.15234375,
"learning_rate": 7.717171717171717e-06,
"loss": 1.096,
"step": 113
},
{
"epoch": 0.23030303030303031,
"grad_norm": 0.1787109375,
"learning_rate": 7.696969696969696e-06,
"loss": 1.1127,
"step": 114
},
{
"epoch": 0.23232323232323232,
"grad_norm": 0.1904296875,
"learning_rate": 7.676767676767677e-06,
"loss": 1.181,
"step": 115
},
{
"epoch": 0.23434343434343435,
"grad_norm": 0.1630859375,
"learning_rate": 7.656565656565658e-06,
"loss": 1.1442,
"step": 116
},
{
"epoch": 0.23636363636363636,
"grad_norm": 0.208984375,
"learning_rate": 7.636363636363638e-06,
"loss": 1.1313,
"step": 117
},
{
"epoch": 0.2383838383838384,
"grad_norm": 0.1630859375,
"learning_rate": 7.616161616161617e-06,
"loss": 1.1727,
"step": 118
},
{
"epoch": 0.2404040404040404,
"grad_norm": 0.16015625,
"learning_rate": 7.595959595959597e-06,
"loss": 1.0959,
"step": 119
},
{
"epoch": 0.24242424242424243,
"grad_norm": 0.2197265625,
"learning_rate": 7.5757575757575764e-06,
"loss": 1.1908,
"step": 120
},
{
"epoch": 0.24444444444444444,
"grad_norm": 0.197265625,
"learning_rate": 7.555555555555556e-06,
"loss": 1.1209,
"step": 121
},
{
"epoch": 0.24646464646464647,
"grad_norm": 0.1591796875,
"learning_rate": 7.535353535353536e-06,
"loss": 1.1523,
"step": 122
},
{
"epoch": 0.24848484848484848,
"grad_norm": 0.1767578125,
"learning_rate": 7.515151515151516e-06,
"loss": 1.1379,
"step": 123
},
{
"epoch": 0.2505050505050505,
"grad_norm": 0.1865234375,
"learning_rate": 7.494949494949496e-06,
"loss": 1.1438,
"step": 124
},
{
"epoch": 0.25252525252525254,
"grad_norm": 0.16015625,
"learning_rate": 7.474747474747476e-06,
"loss": 1.1362,
"step": 125
},
{
"epoch": 0.2545454545454545,
"grad_norm": 0.1630859375,
"learning_rate": 7.454545454545456e-06,
"loss": 1.118,
"step": 126
},
{
"epoch": 0.25656565656565655,
"grad_norm": 0.17578125,
"learning_rate": 7.434343434343435e-06,
"loss": 1.0866,
"step": 127
},
{
"epoch": 0.2585858585858586,
"grad_norm": 0.2353515625,
"learning_rate": 7.414141414141415e-06,
"loss": 1.0837,
"step": 128
},
{
"epoch": 0.2606060606060606,
"grad_norm": 0.177734375,
"learning_rate": 7.393939393939395e-06,
"loss": 1.1266,
"step": 129
},
{
"epoch": 0.26262626262626265,
"grad_norm": 0.162109375,
"learning_rate": 7.373737373737374e-06,
"loss": 1.164,
"step": 130
},
{
"epoch": 0.26464646464646463,
"grad_norm": 0.177734375,
"learning_rate": 7.353535353535353e-06,
"loss": 1.071,
"step": 131
},
{
"epoch": 0.26666666666666666,
"grad_norm": 0.19140625,
"learning_rate": 7.333333333333333e-06,
"loss": 1.1683,
"step": 132
},
{
"epoch": 0.2686868686868687,
"grad_norm": 0.2177734375,
"learning_rate": 7.3131313131313146e-06,
"loss": 1.1239,
"step": 133
},
{
"epoch": 0.27070707070707073,
"grad_norm": 0.16015625,
"learning_rate": 7.2929292929292934e-06,
"loss": 1.1213,
"step": 134
},
{
"epoch": 0.2727272727272727,
"grad_norm": 0.158203125,
"learning_rate": 7.272727272727273e-06,
"loss": 1.0712,
"step": 135
},
{
"epoch": 0.27474747474747474,
"grad_norm": 0.1630859375,
"learning_rate": 7.252525252525253e-06,
"loss": 1.109,
"step": 136
},
{
"epoch": 0.2767676767676768,
"grad_norm": 0.75,
"learning_rate": 7.232323232323233e-06,
"loss": 1.113,
"step": 137
},
{
"epoch": 0.2787878787878788,
"grad_norm": 0.1640625,
"learning_rate": 7.212121212121212e-06,
"loss": 1.0855,
"step": 138
},
{
"epoch": 0.2808080808080808,
"grad_norm": 0.1630859375,
"learning_rate": 7.191919191919192e-06,
"loss": 1.1196,
"step": 139
},
{
"epoch": 0.2828282828282828,
"grad_norm": 0.1884765625,
"learning_rate": 7.171717171717172e-06,
"loss": 1.1559,
"step": 140
},
{
"epoch": 0.28484848484848485,
"grad_norm": 0.1953125,
"learning_rate": 7.151515151515152e-06,
"loss": 1.1031,
"step": 141
},
{
"epoch": 0.2868686868686869,
"grad_norm": 0.16015625,
"learning_rate": 7.131313131313132e-06,
"loss": 1.0808,
"step": 142
},
{
"epoch": 0.28888888888888886,
"grad_norm": 0.193359375,
"learning_rate": 7.111111111111112e-06,
"loss": 1.0975,
"step": 143
},
{
"epoch": 0.2909090909090909,
"grad_norm": 0.1845703125,
"learning_rate": 7.0909090909090916e-06,
"loss": 1.1093,
"step": 144
},
{
"epoch": 0.29292929292929293,
"grad_norm": 0.166015625,
"learning_rate": 7.070707070707071e-06,
"loss": 1.1066,
"step": 145
},
{
"epoch": 0.29494949494949496,
"grad_norm": 0.20703125,
"learning_rate": 7.050505050505051e-06,
"loss": 1.1233,
"step": 146
},
{
"epoch": 0.296969696969697,
"grad_norm": 0.1748046875,
"learning_rate": 7.030303030303031e-06,
"loss": 1.1179,
"step": 147
},
{
"epoch": 0.298989898989899,
"grad_norm": 0.177734375,
"learning_rate": 7.0101010101010105e-06,
"loss": 1.1154,
"step": 148
},
{
"epoch": 0.301010101010101,
"grad_norm": 0.1953125,
"learning_rate": 6.98989898989899e-06,
"loss": 1.0917,
"step": 149
},
{
"epoch": 0.30303030303030304,
"grad_norm": 0.19140625,
"learning_rate": 6.969696969696971e-06,
"loss": 1.0864,
"step": 150
},
{
"epoch": 0.30505050505050507,
"grad_norm": 0.1962890625,
"learning_rate": 6.9494949494949505e-06,
"loss": 1.1178,
"step": 151
},
{
"epoch": 0.30707070707070705,
"grad_norm": 0.1630859375,
"learning_rate": 6.92929292929293e-06,
"loss": 1.1425,
"step": 152
},
{
"epoch": 0.3090909090909091,
"grad_norm": 0.1728515625,
"learning_rate": 6.90909090909091e-06,
"loss": 1.0966,
"step": 153
},
{
"epoch": 0.3111111111111111,
"grad_norm": 0.25,
"learning_rate": 6.88888888888889e-06,
"loss": 1.0981,
"step": 154
},
{
"epoch": 0.31313131313131315,
"grad_norm": 0.1650390625,
"learning_rate": 6.868686868686869e-06,
"loss": 1.0782,
"step": 155
},
{
"epoch": 0.3151515151515151,
"grad_norm": 0.1875,
"learning_rate": 6.848484848484849e-06,
"loss": 1.1391,
"step": 156
},
{
"epoch": 0.31717171717171716,
"grad_norm": 0.1689453125,
"learning_rate": 6.828282828282828e-06,
"loss": 1.0521,
"step": 157
},
{
"epoch": 0.3191919191919192,
"grad_norm": 0.2119140625,
"learning_rate": 6.808080808080809e-06,
"loss": 1.0959,
"step": 158
},
{
"epoch": 0.3212121212121212,
"grad_norm": 0.1669921875,
"learning_rate": 6.787878787878789e-06,
"loss": 1.0973,
"step": 159
},
{
"epoch": 0.32323232323232326,
"grad_norm": 0.1630859375,
"learning_rate": 6.767676767676769e-06,
"loss": 1.0526,
"step": 160
},
{
"epoch": 0.32525252525252524,
"grad_norm": 0.201171875,
"learning_rate": 6.747474747474749e-06,
"loss": 1.0886,
"step": 161
},
{
"epoch": 0.32727272727272727,
"grad_norm": 0.1650390625,
"learning_rate": 6.7272727272727275e-06,
"loss": 1.0519,
"step": 162
},
{
"epoch": 0.3292929292929293,
"grad_norm": 0.224609375,
"learning_rate": 6.707070707070707e-06,
"loss": 1.0623,
"step": 163
},
{
"epoch": 0.33131313131313134,
"grad_norm": 0.3359375,
"learning_rate": 6.686868686868687e-06,
"loss": 1.0819,
"step": 164
},
{
"epoch": 0.3333333333333333,
"grad_norm": 0.205078125,
"learning_rate": 6.666666666666667e-06,
"loss": 1.1033,
"step": 165
},
{
"epoch": 0.33535353535353535,
"grad_norm": 0.365234375,
"learning_rate": 6.646464646464646e-06,
"loss": 1.108,
"step": 166
},
{
"epoch": 0.3373737373737374,
"grad_norm": 0.2421875,
"learning_rate": 6.626262626262627e-06,
"loss": 1.0995,
"step": 167
},
{
"epoch": 0.3393939393939394,
"grad_norm": 0.1630859375,
"learning_rate": 6.606060606060607e-06,
"loss": 1.066,
"step": 168
},
{
"epoch": 0.3414141414141414,
"grad_norm": 0.2197265625,
"learning_rate": 6.585858585858586e-06,
"loss": 1.113,
"step": 169
},
{
"epoch": 0.3434343434343434,
"grad_norm": 0.1923828125,
"learning_rate": 6.565656565656566e-06,
"loss": 1.1046,
"step": 170
},
{
"epoch": 0.34545454545454546,
"grad_norm": 0.1669921875,
"learning_rate": 6.545454545454546e-06,
"loss": 1.129,
"step": 171
},
{
"epoch": 0.3474747474747475,
"grad_norm": 0.1845703125,
"learning_rate": 6.525252525252526e-06,
"loss": 1.0712,
"step": 172
},
{
"epoch": 0.34949494949494947,
"grad_norm": 0.166015625,
"learning_rate": 6.505050505050505e-06,
"loss": 1.0549,
"step": 173
},
{
"epoch": 0.3515151515151515,
"grad_norm": 0.1953125,
"learning_rate": 6.484848484848485e-06,
"loss": 1.0983,
"step": 174
},
{
"epoch": 0.35353535353535354,
"grad_norm": 0.1689453125,
"learning_rate": 6.464646464646466e-06,
"loss": 1.1056,
"step": 175
},
{
"epoch": 0.35555555555555557,
"grad_norm": 0.177734375,
"learning_rate": 6.444444444444445e-06,
"loss": 1.1125,
"step": 176
},
{
"epoch": 0.3575757575757576,
"grad_norm": 0.1708984375,
"learning_rate": 6.424242424242425e-06,
"loss": 1.143,
"step": 177
},
{
"epoch": 0.3595959595959596,
"grad_norm": 0.265625,
"learning_rate": 6.404040404040405e-06,
"loss": 1.0382,
"step": 178
},
{
"epoch": 0.3616161616161616,
"grad_norm": 0.1767578125,
"learning_rate": 6.3838383838383845e-06,
"loss": 1.1309,
"step": 179
},
{
"epoch": 0.36363636363636365,
"grad_norm": 0.27734375,
"learning_rate": 6.363636363636364e-06,
"loss": 1.0753,
"step": 180
},
{
"epoch": 0.3656565656565657,
"grad_norm": 0.1728515625,
"learning_rate": 6.343434343434344e-06,
"loss": 1.0436,
"step": 181
},
{
"epoch": 0.36767676767676766,
"grad_norm": 0.189453125,
"learning_rate": 6.323232323232324e-06,
"loss": 1.0809,
"step": 182
},
{
"epoch": 0.3696969696969697,
"grad_norm": 0.1767578125,
"learning_rate": 6.303030303030303e-06,
"loss": 1.0613,
"step": 183
},
{
"epoch": 0.3717171717171717,
"grad_norm": 0.1826171875,
"learning_rate": 6.282828282828284e-06,
"loss": 1.0975,
"step": 184
},
{
"epoch": 0.37373737373737376,
"grad_norm": 0.1708984375,
"learning_rate": 6.262626262626264e-06,
"loss": 1.0774,
"step": 185
},
{
"epoch": 0.37575757575757573,
"grad_norm": 0.1884765625,
"learning_rate": 6.2424242424242434e-06,
"loss": 1.0811,
"step": 186
},
{
"epoch": 0.37777777777777777,
"grad_norm": 0.1845703125,
"learning_rate": 6.222222222222223e-06,
"loss": 1.1261,
"step": 187
},
{
"epoch": 0.3797979797979798,
"grad_norm": 0.1943359375,
"learning_rate": 6.202020202020203e-06,
"loss": 1.0866,
"step": 188
},
{
"epoch": 0.38181818181818183,
"grad_norm": 0.1728515625,
"learning_rate": 6.181818181818182e-06,
"loss": 1.0776,
"step": 189
},
{
"epoch": 0.3838383838383838,
"grad_norm": 0.205078125,
"learning_rate": 6.1616161616161615e-06,
"loss": 1.0764,
"step": 190
},
{
"epoch": 0.38585858585858585,
"grad_norm": 0.32421875,
"learning_rate": 6.141414141414141e-06,
"loss": 1.0798,
"step": 191
},
{
"epoch": 0.3878787878787879,
"grad_norm": 0.1806640625,
"learning_rate": 6.121212121212121e-06,
"loss": 0.987,
"step": 192
},
{
"epoch": 0.3898989898989899,
"grad_norm": 0.1923828125,
"learning_rate": 6.1010101010101015e-06,
"loss": 1.068,
"step": 193
},
{
"epoch": 0.39191919191919194,
"grad_norm": 0.1640625,
"learning_rate": 6.080808080808081e-06,
"loss": 1.0608,
"step": 194
},
{
"epoch": 0.3939393939393939,
"grad_norm": 0.1845703125,
"learning_rate": 6.060606060606061e-06,
"loss": 1.0785,
"step": 195
},
{
"epoch": 0.39595959595959596,
"grad_norm": 0.193359375,
"learning_rate": 6.040404040404041e-06,
"loss": 1.1016,
"step": 196
},
{
"epoch": 0.397979797979798,
"grad_norm": 0.169921875,
"learning_rate": 6.0202020202020204e-06,
"loss": 1.0731,
"step": 197
},
{
"epoch": 0.4,
"grad_norm": 1.75,
"learning_rate": 6e-06,
"loss": 1.0454,
"step": 198
},
{
"epoch": 0.402020202020202,
"grad_norm": 0.1865234375,
"learning_rate": 5.97979797979798e-06,
"loss": 1.0912,
"step": 199
},
{
"epoch": 0.40404040404040403,
"grad_norm": 0.1689453125,
"learning_rate": 5.95959595959596e-06,
"loss": 1.0457,
"step": 200
},
{
"epoch": 0.40606060606060607,
"grad_norm": 0.166015625,
"learning_rate": 5.93939393939394e-06,
"loss": 1.0382,
"step": 201
},
{
"epoch": 0.4080808080808081,
"grad_norm": 0.201171875,
"learning_rate": 5.91919191919192e-06,
"loss": 1.1034,
"step": 202
},
{
"epoch": 0.4101010101010101,
"grad_norm": 0.236328125,
"learning_rate": 5.8989898989899e-06,
"loss": 1.0624,
"step": 203
},
{
"epoch": 0.4121212121212121,
"grad_norm": 0.181640625,
"learning_rate": 5.878787878787879e-06,
"loss": 1.088,
"step": 204
},
{
"epoch": 0.41414141414141414,
"grad_norm": 0.1982421875,
"learning_rate": 5.858585858585859e-06,
"loss": 1.0434,
"step": 205
},
{
"epoch": 0.4161616161616162,
"grad_norm": 0.1728515625,
"learning_rate": 5.838383838383839e-06,
"loss": 1.0536,
"step": 206
},
{
"epoch": 0.41818181818181815,
"grad_norm": 0.2216796875,
"learning_rate": 5.8181818181818185e-06,
"loss": 1.0236,
"step": 207
},
{
"epoch": 0.4202020202020202,
"grad_norm": 0.1904296875,
"learning_rate": 5.797979797979798e-06,
"loss": 1.013,
"step": 208
},
{
"epoch": 0.4222222222222222,
"grad_norm": 0.1787109375,
"learning_rate": 5.777777777777778e-06,
"loss": 1.0562,
"step": 209
},
{
"epoch": 0.42424242424242425,
"grad_norm": 0.1806640625,
"learning_rate": 5.7575757575757586e-06,
"loss": 1.0232,
"step": 210
},
{
"epoch": 0.4262626262626263,
"grad_norm": 0.1787109375,
"learning_rate": 5.737373737373738e-06,
"loss": 1.0425,
"step": 211
},
{
"epoch": 0.42828282828282827,
"grad_norm": 0.173828125,
"learning_rate": 5.717171717171718e-06,
"loss": 1.0381,
"step": 212
},
{
"epoch": 0.4303030303030303,
"grad_norm": 0.197265625,
"learning_rate": 5.696969696969698e-06,
"loss": 1.0393,
"step": 213
},
{
"epoch": 0.43232323232323233,
"grad_norm": 0.26171875,
"learning_rate": 5.6767676767676775e-06,
"loss": 1.0934,
"step": 214
},
{
"epoch": 0.43434343434343436,
"grad_norm": 0.1708984375,
"learning_rate": 5.656565656565657e-06,
"loss": 1.0685,
"step": 215
},
{
"epoch": 0.43636363636363634,
"grad_norm": 0.236328125,
"learning_rate": 5.636363636363636e-06,
"loss": 1.0601,
"step": 216
},
{
"epoch": 0.4383838383838384,
"grad_norm": 0.1748046875,
"learning_rate": 5.616161616161616e-06,
"loss": 1.0923,
"step": 217
},
{
"epoch": 0.4404040404040404,
"grad_norm": 0.1845703125,
"learning_rate": 5.595959595959597e-06,
"loss": 1.0975,
"step": 218
},
{
"epoch": 0.44242424242424244,
"grad_norm": 0.25390625,
"learning_rate": 5.575757575757577e-06,
"loss": 1.098,
"step": 219
},
{
"epoch": 0.4444444444444444,
"grad_norm": 0.1943359375,
"learning_rate": 5.555555555555557e-06,
"loss": 1.0009,
"step": 220
},
{
"epoch": 0.44646464646464645,
"grad_norm": 0.17578125,
"learning_rate": 5.5353535353535355e-06,
"loss": 1.0145,
"step": 221
},
{
"epoch": 0.4484848484848485,
"grad_norm": 0.17578125,
"learning_rate": 5.515151515151515e-06,
"loss": 1.0652,
"step": 222
},
{
"epoch": 0.4505050505050505,
"grad_norm": 0.173828125,
"learning_rate": 5.494949494949495e-06,
"loss": 1.0448,
"step": 223
},
{
"epoch": 0.45252525252525255,
"grad_norm": 0.1728515625,
"learning_rate": 5.474747474747475e-06,
"loss": 1.0548,
"step": 224
},
{
"epoch": 0.45454545454545453,
"grad_norm": 0.2060546875,
"learning_rate": 5.4545454545454545e-06,
"loss": 1.0661,
"step": 225
},
{
"epoch": 0.45656565656565656,
"grad_norm": 0.1826171875,
"learning_rate": 5.434343434343434e-06,
"loss": 1.0833,
"step": 226
},
{
"epoch": 0.4585858585858586,
"grad_norm": 0.1787109375,
"learning_rate": 5.414141414141415e-06,
"loss": 1.0515,
"step": 227
},
{
"epoch": 0.46060606060606063,
"grad_norm": 0.1708984375,
"learning_rate": 5.3939393939393945e-06,
"loss": 1.0603,
"step": 228
},
{
"epoch": 0.4626262626262626,
"grad_norm": 0.1923828125,
"learning_rate": 5.373737373737374e-06,
"loss": 1.073,
"step": 229
},
{
"epoch": 0.46464646464646464,
"grad_norm": 0.1826171875,
"learning_rate": 5.353535353535354e-06,
"loss": 1.0903,
"step": 230
},
{
"epoch": 0.4666666666666667,
"grad_norm": 0.1845703125,
"learning_rate": 5.333333333333334e-06,
"loss": 1.054,
"step": 231
},
{
"epoch": 0.4686868686868687,
"grad_norm": 0.1787109375,
"learning_rate": 5.313131313131313e-06,
"loss": 1.0634,
"step": 232
},
{
"epoch": 0.4707070707070707,
"grad_norm": 0.2275390625,
"learning_rate": 5.292929292929293e-06,
"loss": 1.0463,
"step": 233
},
{
"epoch": 0.4727272727272727,
"grad_norm": 0.1884765625,
"learning_rate": 5.272727272727273e-06,
"loss": 1.0138,
"step": 234
},
{
"epoch": 0.47474747474747475,
"grad_norm": 0.17578125,
"learning_rate": 5.252525252525253e-06,
"loss": 0.9779,
"step": 235
},
{
"epoch": 0.4767676767676768,
"grad_norm": 0.1943359375,
"learning_rate": 5.232323232323233e-06,
"loss": 1.112,
"step": 236
},
{
"epoch": 0.47878787878787876,
"grad_norm": 0.21875,
"learning_rate": 5.212121212121213e-06,
"loss": 1.079,
"step": 237
},
{
"epoch": 0.4808080808080808,
"grad_norm": 0.1826171875,
"learning_rate": 5.191919191919193e-06,
"loss": 1.0357,
"step": 238
},
{
"epoch": 0.48282828282828283,
"grad_norm": 0.30859375,
"learning_rate": 5.171717171717172e-06,
"loss": 1.0545,
"step": 239
},
{
"epoch": 0.48484848484848486,
"grad_norm": 0.1787109375,
"learning_rate": 5.151515151515152e-06,
"loss": 1.0452,
"step": 240
},
{
"epoch": 0.4868686868686869,
"grad_norm": 0.181640625,
"learning_rate": 5.131313131313132e-06,
"loss": 1.0302,
"step": 241
},
{
"epoch": 0.4888888888888889,
"grad_norm": 0.1787109375,
"learning_rate": 5.1111111111111115e-06,
"loss": 1.03,
"step": 242
},
{
"epoch": 0.4909090909090909,
"grad_norm": 0.189453125,
"learning_rate": 5.090909090909091e-06,
"loss": 0.9899,
"step": 243
},
{
"epoch": 0.49292929292929294,
"grad_norm": 0.2080078125,
"learning_rate": 5.070707070707072e-06,
"loss": 1.0448,
"step": 244
},
{
"epoch": 0.494949494949495,
"grad_norm": 0.203125,
"learning_rate": 5.0505050505050515e-06,
"loss": 1.0938,
"step": 245
},
{
"epoch": 0.49696969696969695,
"grad_norm": 0.17578125,
"learning_rate": 5.030303030303031e-06,
"loss": 1.0318,
"step": 246
},
{
"epoch": 0.498989898989899,
"grad_norm": 0.1689453125,
"learning_rate": 5.010101010101011e-06,
"loss": 0.9649,
"step": 247
},
{
"epoch": 0.501010101010101,
"grad_norm": 0.1865234375,
"learning_rate": 4.98989898989899e-06,
"loss": 1.0887,
"step": 248
},
{
"epoch": 0.503030303030303,
"grad_norm": 0.345703125,
"learning_rate": 4.9696969696969696e-06,
"loss": 1.0543,
"step": 249
},
{
"epoch": 0.5050505050505051,
"grad_norm": 0.18359375,
"learning_rate": 4.94949494949495e-06,
"loss": 1.0052,
"step": 250
},
{
"epoch": 0.5070707070707071,
"grad_norm": 0.1904296875,
"learning_rate": 4.92929292929293e-06,
"loss": 1.0364,
"step": 251
},
{
"epoch": 0.509090909090909,
"grad_norm": 0.1796875,
"learning_rate": 4.90909090909091e-06,
"loss": 1.0756,
"step": 252
},
{
"epoch": 0.5111111111111111,
"grad_norm": 0.2041015625,
"learning_rate": 4.888888888888889e-06,
"loss": 1.0512,
"step": 253
},
{
"epoch": 0.5131313131313131,
"grad_norm": 0.201171875,
"learning_rate": 4.868686868686869e-06,
"loss": 1.0372,
"step": 254
},
{
"epoch": 0.5151515151515151,
"grad_norm": 0.28125,
"learning_rate": 4.848484848484849e-06,
"loss": 1.1119,
"step": 255
},
{
"epoch": 0.5171717171717172,
"grad_norm": 0.181640625,
"learning_rate": 4.8282828282828285e-06,
"loss": 1.077,
"step": 256
},
{
"epoch": 0.5191919191919192,
"grad_norm": 0.19140625,
"learning_rate": 4.808080808080808e-06,
"loss": 1.1184,
"step": 257
},
{
"epoch": 0.5212121212121212,
"grad_norm": 0.208984375,
"learning_rate": 4.787878787878788e-06,
"loss": 1.0886,
"step": 258
},
{
"epoch": 0.5232323232323233,
"grad_norm": 0.1875,
"learning_rate": 4.7676767676767685e-06,
"loss": 1.0095,
"step": 259
},
{
"epoch": 0.5252525252525253,
"grad_norm": 0.205078125,
"learning_rate": 4.747474747474748e-06,
"loss": 1.045,
"step": 260
},
{
"epoch": 0.5272727272727272,
"grad_norm": 0.1904296875,
"learning_rate": 4.727272727272728e-06,
"loss": 1.0381,
"step": 261
},
{
"epoch": 0.5292929292929293,
"grad_norm": 0.1826171875,
"learning_rate": 4.707070707070707e-06,
"loss": 1.0379,
"step": 262
},
{
"epoch": 0.5313131313131313,
"grad_norm": 0.1748046875,
"learning_rate": 4.6868686868686874e-06,
"loss": 1.0157,
"step": 263
},
{
"epoch": 0.5333333333333333,
"grad_norm": 0.18359375,
"learning_rate": 4.666666666666667e-06,
"loss": 1.0467,
"step": 264
},
{
"epoch": 0.5353535353535354,
"grad_norm": 0.1796875,
"learning_rate": 4.646464646464647e-06,
"loss": 1.045,
"step": 265
},
{
"epoch": 0.5373737373737374,
"grad_norm": 0.185546875,
"learning_rate": 4.626262626262627e-06,
"loss": 1.0524,
"step": 266
},
{
"epoch": 0.5393939393939394,
"grad_norm": 0.265625,
"learning_rate": 4.606060606060606e-06,
"loss": 1.0835,
"step": 267
},
{
"epoch": 0.5414141414141415,
"grad_norm": 0.1962890625,
"learning_rate": 4.585858585858586e-06,
"loss": 1.0561,
"step": 268
},
{
"epoch": 0.5434343434343434,
"grad_norm": 0.1748046875,
"learning_rate": 4.565656565656566e-06,
"loss": 1.0416,
"step": 269
},
{
"epoch": 0.5454545454545454,
"grad_norm": 0.2158203125,
"learning_rate": 4.5454545454545455e-06,
"loss": 1.0629,
"step": 270
},
{
"epoch": 0.5474747474747474,
"grad_norm": 0.2099609375,
"learning_rate": 4.525252525252526e-06,
"loss": 1.0639,
"step": 271
},
{
"epoch": 0.5494949494949495,
"grad_norm": 0.1904296875,
"learning_rate": 4.505050505050506e-06,
"loss": 1.0732,
"step": 272
},
{
"epoch": 0.5515151515151515,
"grad_norm": 0.1904296875,
"learning_rate": 4.4848484848484855e-06,
"loss": 1.1106,
"step": 273
},
{
"epoch": 0.5535353535353535,
"grad_norm": 0.185546875,
"learning_rate": 4.464646464646465e-06,
"loss": 1.0459,
"step": 274
},
{
"epoch": 0.5555555555555556,
"grad_norm": 0.1943359375,
"learning_rate": 4.444444444444444e-06,
"loss": 1.0037,
"step": 275
},
{
"epoch": 0.5575757575757576,
"grad_norm": 0.197265625,
"learning_rate": 4.424242424242425e-06,
"loss": 1.0369,
"step": 276
},
{
"epoch": 0.5595959595959596,
"grad_norm": 0.234375,
"learning_rate": 4.4040404040404044e-06,
"loss": 1.0844,
"step": 277
},
{
"epoch": 0.5616161616161616,
"grad_norm": 0.18359375,
"learning_rate": 4.383838383838384e-06,
"loss": 1.0097,
"step": 278
},
{
"epoch": 0.5636363636363636,
"grad_norm": 0.1787109375,
"learning_rate": 4.363636363636364e-06,
"loss": 1.0342,
"step": 279
},
{
"epoch": 0.5656565656565656,
"grad_norm": 0.2177734375,
"learning_rate": 4.343434343434344e-06,
"loss": 1.0167,
"step": 280
},
{
"epoch": 0.5676767676767677,
"grad_norm": 0.1796875,
"learning_rate": 4.323232323232323e-06,
"loss": 1.0203,
"step": 281
},
{
"epoch": 0.5696969696969697,
"grad_norm": 0.251953125,
"learning_rate": 4.303030303030303e-06,
"loss": 1.0382,
"step": 282
},
{
"epoch": 0.5717171717171717,
"grad_norm": 0.19921875,
"learning_rate": 4.282828282828283e-06,
"loss": 1.0358,
"step": 283
},
{
"epoch": 0.5737373737373738,
"grad_norm": 0.173828125,
"learning_rate": 4.262626262626263e-06,
"loss": 1.072,
"step": 284
},
{
"epoch": 0.5757575757575758,
"grad_norm": 0.19140625,
"learning_rate": 4.242424242424243e-06,
"loss": 1.0884,
"step": 285
},
{
"epoch": 0.5777777777777777,
"grad_norm": 0.1787109375,
"learning_rate": 4.222222222222223e-06,
"loss": 1.032,
"step": 286
},
{
"epoch": 0.5797979797979798,
"grad_norm": 0.1884765625,
"learning_rate": 4.2020202020202026e-06,
"loss": 1.039,
"step": 287
},
{
"epoch": 0.5818181818181818,
"grad_norm": 0.17578125,
"learning_rate": 4.181818181818182e-06,
"loss": 1.0533,
"step": 288
},
{
"epoch": 0.5838383838383838,
"grad_norm": 0.17578125,
"learning_rate": 4.161616161616162e-06,
"loss": 1.027,
"step": 289
},
{
"epoch": 0.5858585858585859,
"grad_norm": 0.2109375,
"learning_rate": 4.141414141414142e-06,
"loss": 1.046,
"step": 290
},
{
"epoch": 0.5878787878787879,
"grad_norm": 0.17578125,
"learning_rate": 4.1212121212121215e-06,
"loss": 1.0367,
"step": 291
},
{
"epoch": 0.5898989898989899,
"grad_norm": 0.2431640625,
"learning_rate": 4.101010101010101e-06,
"loss": 1.0158,
"step": 292
},
{
"epoch": 0.591919191919192,
"grad_norm": 0.189453125,
"learning_rate": 4.080808080808081e-06,
"loss": 0.9974,
"step": 293
},
{
"epoch": 0.593939393939394,
"grad_norm": 0.1982421875,
"learning_rate": 4.060606060606061e-06,
"loss": 0.9762,
"step": 294
},
{
"epoch": 0.5959595959595959,
"grad_norm": 0.177734375,
"learning_rate": 4.04040404040404e-06,
"loss": 1.0425,
"step": 295
},
{
"epoch": 0.597979797979798,
"grad_norm": 0.1708984375,
"learning_rate": 4.02020202020202e-06,
"loss": 0.9974,
"step": 296
},
{
"epoch": 0.6,
"grad_norm": 0.17578125,
"learning_rate": 4.000000000000001e-06,
"loss": 1.0481,
"step": 297
},
{
"epoch": 0.602020202020202,
"grad_norm": 0.2333984375,
"learning_rate": 3.97979797979798e-06,
"loss": 1.0446,
"step": 298
},
{
"epoch": 0.604040404040404,
"grad_norm": 0.17578125,
"learning_rate": 3.95959595959596e-06,
"loss": 1.0086,
"step": 299
},
{
"epoch": 0.6060606060606061,
"grad_norm": 0.298828125,
"learning_rate": 3.93939393939394e-06,
"loss": 1.1283,
"step": 300
},
{
"epoch": 0.6080808080808081,
"grad_norm": 0.1884765625,
"learning_rate": 3.9191919191919196e-06,
"loss": 1.0657,
"step": 301
},
{
"epoch": 0.6101010101010101,
"grad_norm": 0.208984375,
"learning_rate": 3.898989898989899e-06,
"loss": 0.9508,
"step": 302
},
{
"epoch": 0.6121212121212121,
"grad_norm": 0.1923828125,
"learning_rate": 3.878787878787879e-06,
"loss": 1.0725,
"step": 303
},
{
"epoch": 0.6141414141414141,
"grad_norm": 0.1953125,
"learning_rate": 3.858585858585859e-06,
"loss": 1.0317,
"step": 304
},
{
"epoch": 0.6161616161616161,
"grad_norm": 0.1826171875,
"learning_rate": 3.8383838383838385e-06,
"loss": 1.0252,
"step": 305
},
{
"epoch": 0.6181818181818182,
"grad_norm": 0.2001953125,
"learning_rate": 3.818181818181819e-06,
"loss": 0.9875,
"step": 306
},
{
"epoch": 0.6202020202020202,
"grad_norm": 0.3984375,
"learning_rate": 3.7979797979797984e-06,
"loss": 1.0311,
"step": 307
},
{
"epoch": 0.6222222222222222,
"grad_norm": 0.18359375,
"learning_rate": 3.777777777777778e-06,
"loss": 1.0357,
"step": 308
},
{
"epoch": 0.6242424242424243,
"grad_norm": 0.2060546875,
"learning_rate": 3.757575757575758e-06,
"loss": 1.019,
"step": 309
},
{
"epoch": 0.6262626262626263,
"grad_norm": 0.181640625,
"learning_rate": 3.737373737373738e-06,
"loss": 1.0169,
"step": 310
},
{
"epoch": 0.6282828282828283,
"grad_norm": 0.1845703125,
"learning_rate": 3.7171717171717177e-06,
"loss": 1.0063,
"step": 311
},
{
"epoch": 0.6303030303030303,
"grad_norm": 0.2392578125,
"learning_rate": 3.6969696969696974e-06,
"loss": 1.0391,
"step": 312
},
{
"epoch": 0.6323232323232323,
"grad_norm": 0.1943359375,
"learning_rate": 3.6767676767676767e-06,
"loss": 1.0459,
"step": 313
},
{
"epoch": 0.6343434343434343,
"grad_norm": 0.1806640625,
"learning_rate": 3.6565656565656573e-06,
"loss": 1.0434,
"step": 314
},
{
"epoch": 0.6363636363636364,
"grad_norm": 0.185546875,
"learning_rate": 3.6363636363636366e-06,
"loss": 0.9825,
"step": 315
},
{
"epoch": 0.6383838383838384,
"grad_norm": 0.1904296875,
"learning_rate": 3.6161616161616163e-06,
"loss": 1.0962,
"step": 316
},
{
"epoch": 0.6404040404040404,
"grad_norm": 0.240234375,
"learning_rate": 3.595959595959596e-06,
"loss": 1.0104,
"step": 317
},
{
"epoch": 0.6424242424242425,
"grad_norm": 0.1767578125,
"learning_rate": 3.575757575757576e-06,
"loss": 0.9901,
"step": 318
},
{
"epoch": 0.6444444444444445,
"grad_norm": 0.18359375,
"learning_rate": 3.555555555555556e-06,
"loss": 1.0571,
"step": 319
},
{
"epoch": 0.6464646464646465,
"grad_norm": 0.1962890625,
"learning_rate": 3.5353535353535356e-06,
"loss": 0.959,
"step": 320
},
{
"epoch": 0.6484848484848484,
"grad_norm": 0.1884765625,
"learning_rate": 3.5151515151515154e-06,
"loss": 1.0403,
"step": 321
},
{
"epoch": 0.6505050505050505,
"grad_norm": 0.1923828125,
"learning_rate": 3.494949494949495e-06,
"loss": 1.0236,
"step": 322
},
{
"epoch": 0.6525252525252525,
"grad_norm": 0.1923828125,
"learning_rate": 3.4747474747474752e-06,
"loss": 1.0587,
"step": 323
},
{
"epoch": 0.6545454545454545,
"grad_norm": 0.1962890625,
"learning_rate": 3.454545454545455e-06,
"loss": 1.0402,
"step": 324
},
{
"epoch": 0.6565656565656566,
"grad_norm": 0.18359375,
"learning_rate": 3.4343434343434347e-06,
"loss": 1.0342,
"step": 325
},
{
"epoch": 0.6585858585858586,
"grad_norm": 0.205078125,
"learning_rate": 3.414141414141414e-06,
"loss": 1.0657,
"step": 326
},
{
"epoch": 0.6606060606060606,
"grad_norm": 0.1806640625,
"learning_rate": 3.3939393939393946e-06,
"loss": 1.0749,
"step": 327
},
{
"epoch": 0.6626262626262627,
"grad_norm": 0.181640625,
"learning_rate": 3.3737373737373743e-06,
"loss": 0.9923,
"step": 328
},
{
"epoch": 0.6646464646464646,
"grad_norm": 0.1708984375,
"learning_rate": 3.3535353535353536e-06,
"loss": 1.0401,
"step": 329
},
{
"epoch": 0.6666666666666666,
"grad_norm": 0.18359375,
"learning_rate": 3.3333333333333333e-06,
"loss": 1.0066,
"step": 330
},
{
"epoch": 0.6686868686868687,
"grad_norm": 0.19921875,
"learning_rate": 3.3131313131313135e-06,
"loss": 1.0443,
"step": 331
},
{
"epoch": 0.6707070707070707,
"grad_norm": 0.287109375,
"learning_rate": 3.292929292929293e-06,
"loss": 1.0418,
"step": 332
},
{
"epoch": 0.6727272727272727,
"grad_norm": 0.17578125,
"learning_rate": 3.272727272727273e-06,
"loss": 1.0639,
"step": 333
},
{
"epoch": 0.6747474747474748,
"grad_norm": 0.2158203125,
"learning_rate": 3.2525252525252527e-06,
"loss": 0.9951,
"step": 334
},
{
"epoch": 0.6767676767676768,
"grad_norm": 0.2431640625,
"learning_rate": 3.232323232323233e-06,
"loss": 1.0334,
"step": 335
},
{
"epoch": 0.6787878787878788,
"grad_norm": 0.31640625,
"learning_rate": 3.2121212121212125e-06,
"loss": 1.0239,
"step": 336
},
{
"epoch": 0.6808080808080809,
"grad_norm": 0.26171875,
"learning_rate": 3.1919191919191923e-06,
"loss": 1.0718,
"step": 337
},
{
"epoch": 0.6828282828282828,
"grad_norm": 0.1982421875,
"learning_rate": 3.171717171717172e-06,
"loss": 1.0556,
"step": 338
},
{
"epoch": 0.6848484848484848,
"grad_norm": 0.19140625,
"learning_rate": 3.1515151515151517e-06,
"loss": 0.9692,
"step": 339
},
{
"epoch": 0.6868686868686869,
"grad_norm": 0.1943359375,
"learning_rate": 3.131313131313132e-06,
"loss": 1.0153,
"step": 340
},
{
"epoch": 0.6888888888888889,
"grad_norm": 0.201171875,
"learning_rate": 3.1111111111111116e-06,
"loss": 1.0404,
"step": 341
},
{
"epoch": 0.6909090909090909,
"grad_norm": 0.2001953125,
"learning_rate": 3.090909090909091e-06,
"loss": 0.9807,
"step": 342
},
{
"epoch": 0.692929292929293,
"grad_norm": 0.185546875,
"learning_rate": 3.0707070707070706e-06,
"loss": 1.0399,
"step": 343
},
{
"epoch": 0.694949494949495,
"grad_norm": 0.1953125,
"learning_rate": 3.0505050505050508e-06,
"loss": 1.0141,
"step": 344
},
{
"epoch": 0.696969696969697,
"grad_norm": 0.193359375,
"learning_rate": 3.0303030303030305e-06,
"loss": 1.0309,
"step": 345
},
{
"epoch": 0.6989898989898989,
"grad_norm": 0.1904296875,
"learning_rate": 3.0101010101010102e-06,
"loss": 1.0634,
"step": 346
},
{
"epoch": 0.701010101010101,
"grad_norm": 0.1923828125,
"learning_rate": 2.98989898989899e-06,
"loss": 1.0344,
"step": 347
},
{
"epoch": 0.703030303030303,
"grad_norm": 0.1826171875,
"learning_rate": 2.96969696969697e-06,
"loss": 1.0023,
"step": 348
},
{
"epoch": 0.705050505050505,
"grad_norm": 0.19921875,
"learning_rate": 2.94949494949495e-06,
"loss": 1.0559,
"step": 349
},
{
"epoch": 0.7070707070707071,
"grad_norm": 0.1982421875,
"learning_rate": 2.9292929292929295e-06,
"loss": 0.9863,
"step": 350
},
{
"epoch": 0.7090909090909091,
"grad_norm": 0.1845703125,
"learning_rate": 2.9090909090909093e-06,
"loss": 1.0229,
"step": 351
},
{
"epoch": 0.7111111111111111,
"grad_norm": 0.20703125,
"learning_rate": 2.888888888888889e-06,
"loss": 1.044,
"step": 352
},
{
"epoch": 0.7131313131313132,
"grad_norm": 0.1923828125,
"learning_rate": 2.868686868686869e-06,
"loss": 1.0886,
"step": 353
},
{
"epoch": 0.7151515151515152,
"grad_norm": 0.2177734375,
"learning_rate": 2.848484848484849e-06,
"loss": 1.0104,
"step": 354
},
{
"epoch": 0.7171717171717171,
"grad_norm": 0.2197265625,
"learning_rate": 2.8282828282828286e-06,
"loss": 1.0258,
"step": 355
},
{
"epoch": 0.7191919191919192,
"grad_norm": 0.1845703125,
"learning_rate": 2.808080808080808e-06,
"loss": 1.0074,
"step": 356
},
{
"epoch": 0.7212121212121212,
"grad_norm": 0.18359375,
"learning_rate": 2.7878787878787885e-06,
"loss": 1.0737,
"step": 357
},
{
"epoch": 0.7232323232323232,
"grad_norm": 0.197265625,
"learning_rate": 2.7676767676767678e-06,
"loss": 1.0597,
"step": 358
},
{
"epoch": 0.7252525252525253,
"grad_norm": 0.1875,
"learning_rate": 2.7474747474747475e-06,
"loss": 1.0295,
"step": 359
},
{
"epoch": 0.7272727272727273,
"grad_norm": 0.1826171875,
"learning_rate": 2.7272727272727272e-06,
"loss": 1.0305,
"step": 360
},
{
"epoch": 0.7292929292929293,
"grad_norm": 0.1845703125,
"learning_rate": 2.7070707070707074e-06,
"loss": 1.0508,
"step": 361
},
{
"epoch": 0.7313131313131314,
"grad_norm": 0.1953125,
"learning_rate": 2.686868686868687e-06,
"loss": 1.008,
"step": 362
},
{
"epoch": 0.7333333333333333,
"grad_norm": 0.1806640625,
"learning_rate": 2.666666666666667e-06,
"loss": 0.9775,
"step": 363
},
{
"epoch": 0.7353535353535353,
"grad_norm": 0.1953125,
"learning_rate": 2.6464646464646466e-06,
"loss": 1.033,
"step": 364
},
{
"epoch": 0.7373737373737373,
"grad_norm": 0.2099609375,
"learning_rate": 2.6262626262626267e-06,
"loss": 1.0351,
"step": 365
},
{
"epoch": 0.7393939393939394,
"grad_norm": 0.166015625,
"learning_rate": 2.6060606060606064e-06,
"loss": 0.9793,
"step": 366
},
{
"epoch": 0.7414141414141414,
"grad_norm": 0.2060546875,
"learning_rate": 2.585858585858586e-06,
"loss": 1.0528,
"step": 367
},
{
"epoch": 0.7434343434343434,
"grad_norm": 0.2099609375,
"learning_rate": 2.565656565656566e-06,
"loss": 1.03,
"step": 368
},
{
"epoch": 0.7454545454545455,
"grad_norm": 0.1826171875,
"learning_rate": 2.5454545454545456e-06,
"loss": 1.0614,
"step": 369
},
{
"epoch": 0.7474747474747475,
"grad_norm": 0.181640625,
"learning_rate": 2.5252525252525258e-06,
"loss": 1.0243,
"step": 370
},
{
"epoch": 0.7494949494949495,
"grad_norm": 0.2060546875,
"learning_rate": 2.5050505050505055e-06,
"loss": 1.0558,
"step": 371
},
{
"epoch": 0.7515151515151515,
"grad_norm": 0.18359375,
"learning_rate": 2.4848484848484848e-06,
"loss": 1.0429,
"step": 372
},
{
"epoch": 0.7535353535353535,
"grad_norm": 0.21484375,
"learning_rate": 2.464646464646465e-06,
"loss": 1.1272,
"step": 373
},
{
"epoch": 0.7555555555555555,
"grad_norm": 0.28515625,
"learning_rate": 2.4444444444444447e-06,
"loss": 1.0403,
"step": 374
},
{
"epoch": 0.7575757575757576,
"grad_norm": 0.1943359375,
"learning_rate": 2.4242424242424244e-06,
"loss": 1.0417,
"step": 375
},
{
"epoch": 0.7595959595959596,
"grad_norm": 0.177734375,
"learning_rate": 2.404040404040404e-06,
"loss": 1.0172,
"step": 376
},
{
"epoch": 0.7616161616161616,
"grad_norm": 0.18359375,
"learning_rate": 2.3838383838383843e-06,
"loss": 1.0566,
"step": 377
},
{
"epoch": 0.7636363636363637,
"grad_norm": 0.234375,
"learning_rate": 2.363636363636364e-06,
"loss": 1.0459,
"step": 378
},
{
"epoch": 0.7656565656565657,
"grad_norm": 0.17578125,
"learning_rate": 2.3434343434343437e-06,
"loss": 1.0115,
"step": 379
},
{
"epoch": 0.7676767676767676,
"grad_norm": 0.197265625,
"learning_rate": 2.3232323232323234e-06,
"loss": 1.0139,
"step": 380
},
{
"epoch": 0.7696969696969697,
"grad_norm": 0.1865234375,
"learning_rate": 2.303030303030303e-06,
"loss": 1.0304,
"step": 381
},
{
"epoch": 0.7717171717171717,
"grad_norm": 0.2470703125,
"learning_rate": 2.282828282828283e-06,
"loss": 0.9806,
"step": 382
},
{
"epoch": 0.7737373737373737,
"grad_norm": 0.21484375,
"learning_rate": 2.262626262626263e-06,
"loss": 1.0062,
"step": 383
},
{
"epoch": 0.7757575757575758,
"grad_norm": 0.1875,
"learning_rate": 2.2424242424242428e-06,
"loss": 1.0253,
"step": 384
},
{
"epoch": 0.7777777777777778,
"grad_norm": 0.2109375,
"learning_rate": 2.222222222222222e-06,
"loss": 1.0317,
"step": 385
},
{
"epoch": 0.7797979797979798,
"grad_norm": 0.18359375,
"learning_rate": 2.2020202020202022e-06,
"loss": 1.0146,
"step": 386
},
{
"epoch": 0.7818181818181819,
"grad_norm": 0.2001953125,
"learning_rate": 2.181818181818182e-06,
"loss": 0.9777,
"step": 387
},
{
"epoch": 0.7838383838383839,
"grad_norm": 0.25390625,
"learning_rate": 2.1616161616161617e-06,
"loss": 1.0182,
"step": 388
},
{
"epoch": 0.7858585858585858,
"grad_norm": 0.2060546875,
"learning_rate": 2.1414141414141414e-06,
"loss": 1.0215,
"step": 389
},
{
"epoch": 0.7878787878787878,
"grad_norm": 0.1904296875,
"learning_rate": 2.1212121212121216e-06,
"loss": 1.0146,
"step": 390
},
{
"epoch": 0.7898989898989899,
"grad_norm": 0.181640625,
"learning_rate": 2.1010101010101013e-06,
"loss": 1.002,
"step": 391
},
{
"epoch": 0.7919191919191919,
"grad_norm": 0.1748046875,
"learning_rate": 2.080808080808081e-06,
"loss": 1.0192,
"step": 392
},
{
"epoch": 0.793939393939394,
"grad_norm": 0.1806640625,
"learning_rate": 2.0606060606060607e-06,
"loss": 0.9827,
"step": 393
},
{
"epoch": 0.795959595959596,
"grad_norm": 0.421875,
"learning_rate": 2.0404040404040405e-06,
"loss": 1.0718,
"step": 394
},
{
"epoch": 0.797979797979798,
"grad_norm": 0.1884765625,
"learning_rate": 2.02020202020202e-06,
"loss": 1.0095,
"step": 395
},
{
"epoch": 0.8,
"grad_norm": 0.21875,
"learning_rate": 2.0000000000000003e-06,
"loss": 1.0553,
"step": 396
},
{
"epoch": 0.802020202020202,
"grad_norm": 0.2412109375,
"learning_rate": 1.97979797979798e-06,
"loss": 0.9963,
"step": 397
},
{
"epoch": 0.804040404040404,
"grad_norm": 0.18359375,
"learning_rate": 1.9595959595959598e-06,
"loss": 1.0107,
"step": 398
},
{
"epoch": 0.806060606060606,
"grad_norm": 0.25390625,
"learning_rate": 1.9393939393939395e-06,
"loss": 1.036,
"step": 399
},
{
"epoch": 0.8080808080808081,
"grad_norm": 0.1865234375,
"learning_rate": 1.9191919191919192e-06,
"loss": 1.0436,
"step": 400
},
{
"epoch": 0.8101010101010101,
"grad_norm": 0.1953125,
"learning_rate": 1.8989898989898992e-06,
"loss": 1.0567,
"step": 401
},
{
"epoch": 0.8121212121212121,
"grad_norm": 0.1806640625,
"learning_rate": 1.878787878787879e-06,
"loss": 0.975,
"step": 402
},
{
"epoch": 0.8141414141414142,
"grad_norm": 0.2158203125,
"learning_rate": 1.8585858585858588e-06,
"loss": 1.0012,
"step": 403
},
{
"epoch": 0.8161616161616162,
"grad_norm": 0.1943359375,
"learning_rate": 1.8383838383838384e-06,
"loss": 1.051,
"step": 404
},
{
"epoch": 0.8181818181818182,
"grad_norm": 0.26171875,
"learning_rate": 1.8181818181818183e-06,
"loss": 1.0274,
"step": 405
},
{
"epoch": 0.8202020202020202,
"grad_norm": 0.1982421875,
"learning_rate": 1.797979797979798e-06,
"loss": 0.9988,
"step": 406
},
{
"epoch": 0.8222222222222222,
"grad_norm": 0.2216796875,
"learning_rate": 1.777777777777778e-06,
"loss": 1.0229,
"step": 407
},
{
"epoch": 0.8242424242424242,
"grad_norm": 0.1787109375,
"learning_rate": 1.7575757575757577e-06,
"loss": 1.0392,
"step": 408
},
{
"epoch": 0.8262626262626263,
"grad_norm": 0.1796875,
"learning_rate": 1.7373737373737376e-06,
"loss": 1.002,
"step": 409
},
{
"epoch": 0.8282828282828283,
"grad_norm": 0.2099609375,
"learning_rate": 1.7171717171717173e-06,
"loss": 1.0471,
"step": 410
},
{
"epoch": 0.8303030303030303,
"grad_norm": 0.21484375,
"learning_rate": 1.6969696969696973e-06,
"loss": 1.0112,
"step": 411
},
{
"epoch": 0.8323232323232324,
"grad_norm": 0.2001953125,
"learning_rate": 1.6767676767676768e-06,
"loss": 1.0887,
"step": 412
},
{
"epoch": 0.8343434343434344,
"grad_norm": 0.181640625,
"learning_rate": 1.6565656565656567e-06,
"loss": 1.014,
"step": 413
},
{
"epoch": 0.8363636363636363,
"grad_norm": 0.25390625,
"learning_rate": 1.6363636363636365e-06,
"loss": 0.9878,
"step": 414
},
{
"epoch": 0.8383838383838383,
"grad_norm": 0.1796875,
"learning_rate": 1.6161616161616164e-06,
"loss": 0.9874,
"step": 415
},
{
"epoch": 0.8404040404040404,
"grad_norm": 0.224609375,
"learning_rate": 1.5959595959595961e-06,
"loss": 1.0532,
"step": 416
},
{
"epoch": 0.8424242424242424,
"grad_norm": 0.1826171875,
"learning_rate": 1.5757575757575759e-06,
"loss": 0.9955,
"step": 417
},
{
"epoch": 0.8444444444444444,
"grad_norm": 0.1748046875,
"learning_rate": 1.5555555555555558e-06,
"loss": 1.0148,
"step": 418
},
{
"epoch": 0.8464646464646465,
"grad_norm": 0.197265625,
"learning_rate": 1.5353535353535353e-06,
"loss": 1.0331,
"step": 419
},
{
"epoch": 0.8484848484848485,
"grad_norm": 0.1748046875,
"learning_rate": 1.5151515151515152e-06,
"loss": 1.0199,
"step": 420
},
{
"epoch": 0.8505050505050505,
"grad_norm": 0.185546875,
"learning_rate": 1.494949494949495e-06,
"loss": 0.9248,
"step": 421
},
{
"epoch": 0.8525252525252526,
"grad_norm": 0.18359375,
"learning_rate": 1.474747474747475e-06,
"loss": 1.0306,
"step": 422
},
{
"epoch": 0.8545454545454545,
"grad_norm": 0.1748046875,
"learning_rate": 1.4545454545454546e-06,
"loss": 1.0285,
"step": 423
},
{
"epoch": 0.8565656565656565,
"grad_norm": 0.2060546875,
"learning_rate": 1.4343434343434346e-06,
"loss": 1.0151,
"step": 424
},
{
"epoch": 0.8585858585858586,
"grad_norm": 0.1845703125,
"learning_rate": 1.4141414141414143e-06,
"loss": 1.0575,
"step": 425
},
{
"epoch": 0.8606060606060606,
"grad_norm": 0.197265625,
"learning_rate": 1.3939393939393942e-06,
"loss": 1.0269,
"step": 426
},
{
"epoch": 0.8626262626262626,
"grad_norm": 0.2216796875,
"learning_rate": 1.3737373737373738e-06,
"loss": 1.0597,
"step": 427
},
{
"epoch": 0.8646464646464647,
"grad_norm": 0.224609375,
"learning_rate": 1.3535353535353537e-06,
"loss": 0.997,
"step": 428
},
{
"epoch": 0.8666666666666667,
"grad_norm": 0.208984375,
"learning_rate": 1.3333333333333334e-06,
"loss": 1.0348,
"step": 429
},
{
"epoch": 0.8686868686868687,
"grad_norm": 0.1787109375,
"learning_rate": 1.3131313131313134e-06,
"loss": 0.978,
"step": 430
},
{
"epoch": 0.8707070707070707,
"grad_norm": 0.2470703125,
"learning_rate": 1.292929292929293e-06,
"loss": 1.0392,
"step": 431
},
{
"epoch": 0.8727272727272727,
"grad_norm": 0.1796875,
"learning_rate": 1.2727272727272728e-06,
"loss": 1.003,
"step": 432
},
{
"epoch": 0.8747474747474747,
"grad_norm": 0.189453125,
"learning_rate": 1.2525252525252527e-06,
"loss": 1.0488,
"step": 433
},
{
"epoch": 0.8767676767676768,
"grad_norm": 0.1875,
"learning_rate": 1.2323232323232325e-06,
"loss": 1.0134,
"step": 434
},
{
"epoch": 0.8787878787878788,
"grad_norm": 0.1943359375,
"learning_rate": 1.2121212121212122e-06,
"loss": 1.0099,
"step": 435
},
{
"epoch": 0.8808080808080808,
"grad_norm": 0.181640625,
"learning_rate": 1.1919191919191921e-06,
"loss": 1.0093,
"step": 436
},
{
"epoch": 0.8828282828282829,
"grad_norm": 0.197265625,
"learning_rate": 1.1717171717171719e-06,
"loss": 1.041,
"step": 437
},
{
"epoch": 0.8848484848484849,
"grad_norm": 0.1904296875,
"learning_rate": 1.1515151515151516e-06,
"loss": 1.0555,
"step": 438
},
{
"epoch": 0.8868686868686869,
"grad_norm": 0.1962890625,
"learning_rate": 1.1313131313131315e-06,
"loss": 0.996,
"step": 439
},
{
"epoch": 0.8888888888888888,
"grad_norm": 0.26953125,
"learning_rate": 1.111111111111111e-06,
"loss": 1.0446,
"step": 440
},
{
"epoch": 0.8909090909090909,
"grad_norm": 0.279296875,
"learning_rate": 1.090909090909091e-06,
"loss": 1.0465,
"step": 441
},
{
"epoch": 0.8929292929292929,
"grad_norm": 0.2294921875,
"learning_rate": 1.0707070707070707e-06,
"loss": 1.0364,
"step": 442
},
{
"epoch": 0.8949494949494949,
"grad_norm": 0.185546875,
"learning_rate": 1.0505050505050506e-06,
"loss": 1.0254,
"step": 443
},
{
"epoch": 0.896969696969697,
"grad_norm": 0.2138671875,
"learning_rate": 1.0303030303030304e-06,
"loss": 0.9752,
"step": 444
},
{
"epoch": 0.898989898989899,
"grad_norm": 0.2255859375,
"learning_rate": 1.01010101010101e-06,
"loss": 1.0568,
"step": 445
},
{
"epoch": 0.901010101010101,
"grad_norm": 0.181640625,
"learning_rate": 9.8989898989899e-07,
"loss": 1.0076,
"step": 446
},
{
"epoch": 0.9030303030303031,
"grad_norm": 0.189453125,
"learning_rate": 9.696969696969698e-07,
"loss": 1.0118,
"step": 447
},
{
"epoch": 0.9050505050505051,
"grad_norm": 0.294921875,
"learning_rate": 9.494949494949496e-07,
"loss": 1.0203,
"step": 448
},
{
"epoch": 0.907070707070707,
"grad_norm": 0.2138671875,
"learning_rate": 9.292929292929294e-07,
"loss": 1.0872,
"step": 449
},
{
"epoch": 0.9090909090909091,
"grad_norm": 0.1796875,
"learning_rate": 9.090909090909091e-07,
"loss": 1.0107,
"step": 450
},
{
"epoch": 0.9111111111111111,
"grad_norm": 0.5234375,
"learning_rate": 8.88888888888889e-07,
"loss": 0.9961,
"step": 451
},
{
"epoch": 0.9131313131313131,
"grad_norm": 0.376953125,
"learning_rate": 8.686868686868688e-07,
"loss": 0.9945,
"step": 452
},
{
"epoch": 0.9151515151515152,
"grad_norm": 0.185546875,
"learning_rate": 8.484848484848486e-07,
"loss": 1.0275,
"step": 453
},
{
"epoch": 0.9171717171717172,
"grad_norm": 0.22265625,
"learning_rate": 8.282828282828284e-07,
"loss": 1.0274,
"step": 454
},
{
"epoch": 0.9191919191919192,
"grad_norm": 0.1826171875,
"learning_rate": 8.080808080808082e-07,
"loss": 1.0346,
"step": 455
},
{
"epoch": 0.9212121212121213,
"grad_norm": 0.2314453125,
"learning_rate": 7.878787878787879e-07,
"loss": 1.098,
"step": 456
},
{
"epoch": 0.9232323232323232,
"grad_norm": 0.28125,
"learning_rate": 7.676767676767677e-07,
"loss": 0.9775,
"step": 457
},
{
"epoch": 0.9252525252525252,
"grad_norm": 0.1923828125,
"learning_rate": 7.474747474747475e-07,
"loss": 1.0375,
"step": 458
},
{
"epoch": 0.9272727272727272,
"grad_norm": 0.57421875,
"learning_rate": 7.272727272727273e-07,
"loss": 1.0435,
"step": 459
},
{
"epoch": 0.9292929292929293,
"grad_norm": 0.2099609375,
"learning_rate": 7.070707070707071e-07,
"loss": 0.9988,
"step": 460
},
{
"epoch": 0.9313131313131313,
"grad_norm": 0.2216796875,
"learning_rate": 6.868686868686869e-07,
"loss": 1.0125,
"step": 461
},
{
"epoch": 0.9333333333333333,
"grad_norm": 0.18359375,
"learning_rate": 6.666666666666667e-07,
"loss": 1.0182,
"step": 462
},
{
"epoch": 0.9353535353535354,
"grad_norm": 0.2041015625,
"learning_rate": 6.464646464646465e-07,
"loss": 0.9986,
"step": 463
},
{
"epoch": 0.9373737373737374,
"grad_norm": 0.1953125,
"learning_rate": 6.262626262626264e-07,
"loss": 1.0093,
"step": 464
},
{
"epoch": 0.9393939393939394,
"grad_norm": 0.189453125,
"learning_rate": 6.060606060606061e-07,
"loss": 0.9871,
"step": 465
},
{
"epoch": 0.9414141414141414,
"grad_norm": 0.255859375,
"learning_rate": 5.858585858585859e-07,
"loss": 1.0191,
"step": 466
},
{
"epoch": 0.9434343434343434,
"grad_norm": 0.265625,
"learning_rate": 5.656565656565658e-07,
"loss": 1.0001,
"step": 467
},
{
"epoch": 0.9454545454545454,
"grad_norm": 0.2001953125,
"learning_rate": 5.454545454545455e-07,
"loss": 0.9726,
"step": 468
},
{
"epoch": 0.9474747474747475,
"grad_norm": 0.1962890625,
"learning_rate": 5.252525252525253e-07,
"loss": 1.069,
"step": 469
},
{
"epoch": 0.9494949494949495,
"grad_norm": 0.2373046875,
"learning_rate": 5.05050505050505e-07,
"loss": 1.0334,
"step": 470
},
{
"epoch": 0.9515151515151515,
"grad_norm": 0.177734375,
"learning_rate": 4.848484848484849e-07,
"loss": 1.0442,
"step": 471
},
{
"epoch": 0.9535353535353536,
"grad_norm": 0.1806640625,
"learning_rate": 4.646464646464647e-07,
"loss": 1.0116,
"step": 472
},
{
"epoch": 0.9555555555555556,
"grad_norm": 0.69921875,
"learning_rate": 4.444444444444445e-07,
"loss": 0.9919,
"step": 473
},
{
"epoch": 0.9575757575757575,
"grad_norm": 0.1865234375,
"learning_rate": 4.242424242424243e-07,
"loss": 1.012,
"step": 474
},
{
"epoch": 0.9595959595959596,
"grad_norm": 0.19140625,
"learning_rate": 4.040404040404041e-07,
"loss": 0.9972,
"step": 475
},
{
"epoch": 0.9616161616161616,
"grad_norm": 0.216796875,
"learning_rate": 3.838383838383838e-07,
"loss": 1.0253,
"step": 476
},
{
"epoch": 0.9636363636363636,
"grad_norm": 0.1796875,
"learning_rate": 3.6363636363636366e-07,
"loss": 1.0101,
"step": 477
},
{
"epoch": 0.9656565656565657,
"grad_norm": 0.1796875,
"learning_rate": 3.4343434343434344e-07,
"loss": 0.9982,
"step": 478
},
{
"epoch": 0.9676767676767677,
"grad_norm": 0.1982421875,
"learning_rate": 3.2323232323232327e-07,
"loss": 1.0391,
"step": 479
},
{
"epoch": 0.9696969696969697,
"grad_norm": 0.1787109375,
"learning_rate": 3.0303030303030305e-07,
"loss": 1.0184,
"step": 480
},
{
"epoch": 0.9717171717171718,
"grad_norm": 0.181640625,
"learning_rate": 2.828282828282829e-07,
"loss": 1.0248,
"step": 481
},
{
"epoch": 0.9737373737373738,
"grad_norm": 0.177734375,
"learning_rate": 2.6262626262626266e-07,
"loss": 0.9759,
"step": 482
},
{
"epoch": 0.9757575757575757,
"grad_norm": 0.185546875,
"learning_rate": 2.4242424242424244e-07,
"loss": 1.0363,
"step": 483
},
{
"epoch": 0.9777777777777777,
"grad_norm": 0.177734375,
"learning_rate": 2.2222222222222224e-07,
"loss": 1.0131,
"step": 484
},
{
"epoch": 0.9797979797979798,
"grad_norm": 0.197265625,
"learning_rate": 2.0202020202020205e-07,
"loss": 1.0333,
"step": 485
},
{
"epoch": 0.9818181818181818,
"grad_norm": 0.1962890625,
"learning_rate": 1.8181818181818183e-07,
"loss": 1.057,
"step": 486
},
{
"epoch": 0.9838383838383838,
"grad_norm": 0.1865234375,
"learning_rate": 1.6161616161616163e-07,
"loss": 1.0465,
"step": 487
},
{
"epoch": 0.9858585858585859,
"grad_norm": 0.1943359375,
"learning_rate": 1.4141414141414144e-07,
"loss": 1.0177,
"step": 488
},
{
"epoch": 0.9878787878787879,
"grad_norm": 0.251953125,
"learning_rate": 1.2121212121212122e-07,
"loss": 1.0767,
"step": 489
},
{
"epoch": 0.98989898989899,
"grad_norm": 0.2099609375,
"learning_rate": 1.0101010101010103e-07,
"loss": 1.0518,
"step": 490
},
{
"epoch": 0.9919191919191919,
"grad_norm": 0.1796875,
"learning_rate": 8.080808080808082e-08,
"loss": 1.0433,
"step": 491
},
{
"epoch": 0.9939393939393939,
"grad_norm": 0.208984375,
"learning_rate": 6.060606060606061e-08,
"loss": 1.0402,
"step": 492
},
{
"epoch": 0.9959595959595959,
"grad_norm": 0.443359375,
"learning_rate": 4.040404040404041e-08,
"loss": 1.0026,
"step": 493
},
{
"epoch": 0.997979797979798,
"grad_norm": 0.228515625,
"learning_rate": 2.0202020202020204e-08,
"loss": 1.1265,
"step": 494
},
{
"epoch": 1.0,
"grad_norm": 0.1806640625,
"learning_rate": 0.0,
"loss": 1.0203,
"step": 495
}
],
"logging_steps": 1.0,
"max_steps": 495,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 0,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.5810777051205468e+18,
"train_batch_size": 1,
"trial_name": null,
"trial_params": null
}