llama3_ko_chavis_JH / checkpoint-810 / trainer_state.json
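The JSON that follows is the Hugging Face Trainer state saved with this checkpoint: a few top-level fields (epoch, global_step, eval_steps, and so on) followed by log_history, a list of per-logging-step records with epoch, learning_rate, loss, and step. As a minimal sketch (not part of the original file, and assuming the checkpoint has been downloaded to a hypothetical local checkpoint-810/ directory), the file can be loaded and the loss curve plotted like this:

    # Sketch: load this trainer_state.json and plot the logged training loss.
    # STATE_PATH is an assumption; point it at the directory you actually downloaded.
    import json
    import matplotlib.pyplot as plt

    STATE_PATH = "checkpoint-810/trainer_state.json"  # hypothetical local path

    with open(STATE_PATH, "r", encoding="utf-8") as f:
        state = json.load(f)

    # Each training record in log_history carries epoch, learning_rate, loss, and step.
    steps = [e["step"] for e in state["log_history"] if "loss" in e]
    losses = [e["loss"] for e in state["log_history"] if "loss" in e]

    plt.plot(steps, losses)
    plt.xlabel("global step")
    plt.ylabel("training loss")
    plt.title("Training loss up to global_step=" + str(state["global_step"]))
    plt.show()
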
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 4.848086124401914,
"eval_steps": 500,
"global_step": 810,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"learning_rate": 5.050505050505052e-07,
"loss": 2.9831,
"step": 1
},
{
"epoch": 0.03,
"learning_rate": 1.0101010101010103e-06,
"loss": 3.0585,
"step": 2
},
{
"epoch": 0.05,
"learning_rate": 1.5151515151515152e-06,
"loss": 2.9613,
"step": 3
},
{
"epoch": 0.06,
"learning_rate": 2.0202020202020206e-06,
"loss": 2.9218,
"step": 4
},
{
"epoch": 0.08,
"learning_rate": 2.5252525252525253e-06,
"loss": 3.1384,
"step": 5
},
{
"epoch": 0.09,
"learning_rate": 3.0303030303030305e-06,
"loss": 2.9236,
"step": 6
},
{
"epoch": 0.11,
"learning_rate": 3.5353535353535352e-06,
"loss": 2.9136,
"step": 7
},
{
"epoch": 0.12,
"learning_rate": 4.040404040404041e-06,
"loss": 2.8794,
"step": 8
},
{
"epoch": 0.14,
"learning_rate": 4.5454545454545455e-06,
"loss": 2.9476,
"step": 9
},
{
"epoch": 0.15,
"learning_rate": 5.050505050505051e-06,
"loss": 2.8648,
"step": 10
},
{
"epoch": 0.17,
"learning_rate": 5.555555555555556e-06,
"loss": 2.7458,
"step": 11
},
{
"epoch": 0.18,
"learning_rate": 6.060606060606061e-06,
"loss": 2.995,
"step": 12
},
{
"epoch": 0.2,
"learning_rate": 6.565656565656567e-06,
"loss": 2.7891,
"step": 13
},
{
"epoch": 0.21,
"learning_rate": 7.0707070707070704e-06,
"loss": 2.9389,
"step": 14
},
{
"epoch": 0.23,
"learning_rate": 7.5757575757575764e-06,
"loss": 2.9519,
"step": 15
},
{
"epoch": 0.24,
"learning_rate": 8.080808080808082e-06,
"loss": 2.9985,
"step": 16
},
{
"epoch": 0.26,
"learning_rate": 8.585858585858587e-06,
"loss": 2.8554,
"step": 17
},
{
"epoch": 0.27,
"learning_rate": 9.090909090909091e-06,
"loss": 2.9224,
"step": 18
},
{
"epoch": 0.29,
"learning_rate": 9.595959595959595e-06,
"loss": 2.7839,
"step": 19
},
{
"epoch": 0.3,
"learning_rate": 1.0101010101010101e-05,
"loss": 2.7728,
"step": 20
},
{
"epoch": 0.32,
"learning_rate": 1.0606060606060607e-05,
"loss": 2.9102,
"step": 21
},
{
"epoch": 0.33,
"learning_rate": 1.1111111111111112e-05,
"loss": 2.8321,
"step": 22
},
{
"epoch": 0.35,
"learning_rate": 1.1616161616161616e-05,
"loss": 2.7018,
"step": 23
},
{
"epoch": 0.36,
"learning_rate": 1.2121212121212122e-05,
"loss": 2.6893,
"step": 24
},
{
"epoch": 0.38,
"learning_rate": 1.2626262626262628e-05,
"loss": 2.5533,
"step": 25
},
{
"epoch": 0.39,
"learning_rate": 1.3131313131313134e-05,
"loss": 2.563,
"step": 26
},
{
"epoch": 0.41,
"learning_rate": 1.3636363636363637e-05,
"loss": 2.6159,
"step": 27
},
{
"epoch": 0.42,
"learning_rate": 1.4141414141414141e-05,
"loss": 2.5117,
"step": 28
},
{
"epoch": 0.44,
"learning_rate": 1.4646464646464647e-05,
"loss": 2.5404,
"step": 29
},
{
"epoch": 0.45,
"learning_rate": 1.5151515151515153e-05,
"loss": 2.5025,
"step": 30
},
{
"epoch": 0.47,
"learning_rate": 1.565656565656566e-05,
"loss": 2.4743,
"step": 31
},
{
"epoch": 0.48,
"learning_rate": 1.6161616161616165e-05,
"loss": 2.4143,
"step": 32
},
{
"epoch": 0.5,
"learning_rate": 1.6666666666666667e-05,
"loss": 2.3167,
"step": 33
},
{
"epoch": 0.51,
"learning_rate": 1.7171717171717173e-05,
"loss": 2.3055,
"step": 34
},
{
"epoch": 0.53,
"learning_rate": 1.7676767676767676e-05,
"loss": 2.2991,
"step": 35
},
{
"epoch": 0.54,
"learning_rate": 1.8181818181818182e-05,
"loss": 2.1632,
"step": 36
},
{
"epoch": 0.56,
"learning_rate": 1.8686868686868688e-05,
"loss": 2.2231,
"step": 37
},
{
"epoch": 0.57,
"learning_rate": 1.919191919191919e-05,
"loss": 2.0811,
"step": 38
},
{
"epoch": 0.59,
"learning_rate": 1.9696969696969697e-05,
"loss": 2.0147,
"step": 39
},
{
"epoch": 0.6,
"learning_rate": 2.0202020202020203e-05,
"loss": 1.9772,
"step": 40
},
{
"epoch": 0.62,
"learning_rate": 2.070707070707071e-05,
"loss": 1.9509,
"step": 41
},
{
"epoch": 0.63,
"learning_rate": 2.1212121212121215e-05,
"loss": 1.88,
"step": 42
},
{
"epoch": 0.65,
"learning_rate": 2.171717171717172e-05,
"loss": 1.7471,
"step": 43
},
{
"epoch": 0.66,
"learning_rate": 2.2222222222222223e-05,
"loss": 1.7748,
"step": 44
},
{
"epoch": 0.68,
"learning_rate": 2.272727272727273e-05,
"loss": 1.6766,
"step": 45
},
{
"epoch": 0.69,
"learning_rate": 2.3232323232323232e-05,
"loss": 1.634,
"step": 46
},
{
"epoch": 0.71,
"learning_rate": 2.3737373737373738e-05,
"loss": 1.5996,
"step": 47
},
{
"epoch": 0.72,
"learning_rate": 2.4242424242424244e-05,
"loss": 1.5091,
"step": 48
},
{
"epoch": 0.74,
"learning_rate": 2.474747474747475e-05,
"loss": 1.5411,
"step": 49
},
{
"epoch": 0.75,
"learning_rate": 2.5252525252525256e-05,
"loss": 1.4753,
"step": 50
},
{
"epoch": 0.77,
"learning_rate": 2.575757575757576e-05,
"loss": 1.4678,
"step": 51
},
{
"epoch": 0.78,
"learning_rate": 2.6262626262626268e-05,
"loss": 1.5142,
"step": 52
},
{
"epoch": 0.8,
"learning_rate": 2.676767676767677e-05,
"loss": 1.4128,
"step": 53
},
{
"epoch": 0.81,
"learning_rate": 2.7272727272727273e-05,
"loss": 1.4542,
"step": 54
},
{
"epoch": 0.83,
"learning_rate": 2.777777777777778e-05,
"loss": 1.4162,
"step": 55
},
{
"epoch": 0.84,
"learning_rate": 2.8282828282828282e-05,
"loss": 1.4502,
"step": 56
},
{
"epoch": 0.86,
"learning_rate": 2.878787878787879e-05,
"loss": 1.295,
"step": 57
},
{
"epoch": 0.87,
"learning_rate": 2.9292929292929294e-05,
"loss": 1.3446,
"step": 58
},
{
"epoch": 0.89,
"learning_rate": 2.9797979797979796e-05,
"loss": 1.2936,
"step": 59
},
{
"epoch": 0.9,
"learning_rate": 3.0303030303030306e-05,
"loss": 1.2942,
"step": 60
},
{
"epoch": 0.92,
"learning_rate": 3.080808080808081e-05,
"loss": 1.3991,
"step": 61
},
{
"epoch": 0.94,
"learning_rate": 3.131313131313132e-05,
"loss": 1.2571,
"step": 62
},
{
"epoch": 0.95,
"learning_rate": 3.181818181818182e-05,
"loss": 1.4106,
"step": 63
},
{
"epoch": 0.97,
"learning_rate": 3.232323232323233e-05,
"loss": 1.3386,
"step": 64
},
{
"epoch": 0.98,
"learning_rate": 3.282828282828283e-05,
"loss": 1.2726,
"step": 65
},
{
"epoch": 1.0,
"learning_rate": 3.3333333333333335e-05,
"loss": 1.3484,
"step": 66
},
{
"epoch": 1.01,
"learning_rate": 3.3838383838383844e-05,
"loss": 1.3355,
"step": 67
},
{
"epoch": 1.03,
"learning_rate": 3.434343434343435e-05,
"loss": 1.3021,
"step": 68
},
{
"epoch": 1.04,
"learning_rate": 3.484848484848485e-05,
"loss": 1.2461,
"step": 69
},
{
"epoch": 1.06,
"learning_rate": 3.535353535353535e-05,
"loss": 1.346,
"step": 70
},
{
"epoch": 1.07,
"learning_rate": 3.5858585858585855e-05,
"loss": 1.3081,
"step": 71
},
{
"epoch": 1.09,
"learning_rate": 3.6363636363636364e-05,
"loss": 1.338,
"step": 72
},
{
"epoch": 1.1,
"learning_rate": 3.686868686868687e-05,
"loss": 1.3435,
"step": 73
},
{
"epoch": 1.12,
"learning_rate": 3.7373737373737376e-05,
"loss": 1.3025,
"step": 74
},
{
"epoch": 1.13,
"learning_rate": 3.787878787878788e-05,
"loss": 1.2198,
"step": 75
},
{
"epoch": 1.15,
"learning_rate": 3.838383838383838e-05,
"loss": 1.314,
"step": 76
},
{
"epoch": 1.16,
"learning_rate": 3.888888888888889e-05,
"loss": 1.262,
"step": 77
},
{
"epoch": 1.18,
"learning_rate": 3.939393939393939e-05,
"loss": 1.3292,
"step": 78
},
{
"epoch": 1.19,
"learning_rate": 3.98989898989899e-05,
"loss": 1.1564,
"step": 79
},
{
"epoch": 1.21,
"learning_rate": 4.0404040404040405e-05,
"loss": 1.1664,
"step": 80
},
{
"epoch": 1.22,
"learning_rate": 4.0909090909090915e-05,
"loss": 1.262,
"step": 81
},
{
"epoch": 1.24,
"learning_rate": 4.141414141414142e-05,
"loss": 1.2889,
"step": 82
},
{
"epoch": 1.25,
"learning_rate": 4.191919191919192e-05,
"loss": 1.1571,
"step": 83
},
{
"epoch": 1.27,
"learning_rate": 4.242424242424243e-05,
"loss": 1.165,
"step": 84
},
{
"epoch": 1.28,
"learning_rate": 4.292929292929293e-05,
"loss": 1.1475,
"step": 85
},
{
"epoch": 1.3,
"learning_rate": 4.343434343434344e-05,
"loss": 1.3032,
"step": 86
},
{
"epoch": 1.31,
"learning_rate": 4.3939393939393944e-05,
"loss": 1.3794,
"step": 87
},
{
"epoch": 1.33,
"learning_rate": 4.4444444444444447e-05,
"loss": 1.2761,
"step": 88
},
{
"epoch": 1.34,
"learning_rate": 4.494949494949495e-05,
"loss": 1.1025,
"step": 89
},
{
"epoch": 1.36,
"learning_rate": 4.545454545454546e-05,
"loss": 1.139,
"step": 90
},
{
"epoch": 1.37,
"learning_rate": 4.595959595959596e-05,
"loss": 1.2408,
"step": 91
},
{
"epoch": 1.39,
"learning_rate": 4.6464646464646464e-05,
"loss": 1.2418,
"step": 92
},
{
"epoch": 1.4,
"learning_rate": 4.696969696969697e-05,
"loss": 1.089,
"step": 93
},
{
"epoch": 1.42,
"learning_rate": 4.7474747474747476e-05,
"loss": 1.2089,
"step": 94
},
{
"epoch": 1.43,
"learning_rate": 4.797979797979798e-05,
"loss": 1.1818,
"step": 95
},
{
"epoch": 1.45,
"learning_rate": 4.848484848484849e-05,
"loss": 1.255,
"step": 96
},
{
"epoch": 1.46,
"learning_rate": 4.898989898989899e-05,
"loss": 1.2591,
"step": 97
},
{
"epoch": 1.48,
"learning_rate": 4.94949494949495e-05,
"loss": 1.1962,
"step": 98
},
{
"epoch": 1.49,
"learning_rate": 5e-05,
"loss": 1.1802,
"step": 99
},
{
"epoch": 1.51,
"learning_rate": 5.050505050505051e-05,
"loss": 1.2168,
"step": 100
},
{
"epoch": 1.52,
"learning_rate": 5.101010101010101e-05,
"loss": 1.203,
"step": 101
},
{
"epoch": 1.54,
"learning_rate": 5.151515151515152e-05,
"loss": 1.2206,
"step": 102
},
{
"epoch": 1.55,
"learning_rate": 5.2020202020202026e-05,
"loss": 1.1691,
"step": 103
},
{
"epoch": 1.57,
"learning_rate": 5.2525252525252536e-05,
"loss": 1.1194,
"step": 104
},
{
"epoch": 1.58,
"learning_rate": 5.303030303030303e-05,
"loss": 1.1041,
"step": 105
},
{
"epoch": 1.6,
"learning_rate": 5.353535353535354e-05,
"loss": 1.1181,
"step": 106
},
{
"epoch": 1.61,
"learning_rate": 5.4040404040404044e-05,
"loss": 1.2211,
"step": 107
},
{
"epoch": 1.63,
"learning_rate": 5.4545454545454546e-05,
"loss": 1.2155,
"step": 108
},
{
"epoch": 1.64,
"learning_rate": 5.5050505050505056e-05,
"loss": 1.0976,
"step": 109
},
{
"epoch": 1.66,
"learning_rate": 5.555555555555556e-05,
"loss": 1.27,
"step": 110
},
{
"epoch": 1.67,
"learning_rate": 5.606060606060606e-05,
"loss": 1.0708,
"step": 111
},
{
"epoch": 1.69,
"learning_rate": 5.6565656565656563e-05,
"loss": 1.1921,
"step": 112
},
{
"epoch": 1.7,
"learning_rate": 5.707070707070707e-05,
"loss": 1.1495,
"step": 113
},
{
"epoch": 1.72,
"learning_rate": 5.757575757575758e-05,
"loss": 1.113,
"step": 114
},
{
"epoch": 1.73,
"learning_rate": 5.808080808080808e-05,
"loss": 1.1519,
"step": 115
},
{
"epoch": 1.75,
"learning_rate": 5.858585858585859e-05,
"loss": 1.1364,
"step": 116
},
{
"epoch": 1.76,
"learning_rate": 5.90909090909091e-05,
"loss": 1.1744,
"step": 117
},
{
"epoch": 1.78,
"learning_rate": 5.959595959595959e-05,
"loss": 1.1322,
"step": 118
},
{
"epoch": 1.79,
"learning_rate": 6.01010101010101e-05,
"loss": 1.0931,
"step": 119
},
{
"epoch": 1.81,
"learning_rate": 6.060606060606061e-05,
"loss": 1.1242,
"step": 120
},
{
"epoch": 1.83,
"learning_rate": 6.111111111111112e-05,
"loss": 1.1163,
"step": 121
},
{
"epoch": 1.84,
"learning_rate": 6.161616161616162e-05,
"loss": 1.3074,
"step": 122
},
{
"epoch": 1.86,
"learning_rate": 6.212121212121213e-05,
"loss": 1.213,
"step": 123
},
{
"epoch": 1.87,
"learning_rate": 6.262626262626264e-05,
"loss": 1.0974,
"step": 124
},
{
"epoch": 1.89,
"learning_rate": 6.313131313131313e-05,
"loss": 1.1272,
"step": 125
},
{
"epoch": 1.9,
"learning_rate": 6.363636363636364e-05,
"loss": 1.2024,
"step": 126
},
{
"epoch": 1.92,
"learning_rate": 6.414141414141415e-05,
"loss": 1.1247,
"step": 127
},
{
"epoch": 1.93,
"learning_rate": 6.464646464646466e-05,
"loss": 1.1386,
"step": 128
},
{
"epoch": 1.95,
"learning_rate": 6.515151515151516e-05,
"loss": 1.2468,
"step": 129
},
{
"epoch": 1.96,
"learning_rate": 6.565656565656566e-05,
"loss": 1.1624,
"step": 130
},
{
"epoch": 1.98,
"learning_rate": 6.616161616161617e-05,
"loss": 1.0378,
"step": 131
},
{
"epoch": 1.99,
"learning_rate": 6.666666666666667e-05,
"loss": 1.1028,
"step": 132
},
{
"epoch": 2.01,
"learning_rate": 6.717171717171718e-05,
"loss": 1.1575,
"step": 133
},
{
"epoch": 2.02,
"learning_rate": 6.767676767676769e-05,
"loss": 1.0586,
"step": 134
},
{
"epoch": 2.04,
"learning_rate": 6.818181818181818e-05,
"loss": 1.0942,
"step": 135
},
{
"epoch": 2.05,
"learning_rate": 6.86868686868687e-05,
"loss": 1.0912,
"step": 136
},
{
"epoch": 2.07,
"learning_rate": 6.91919191919192e-05,
"loss": 1.0711,
"step": 137
},
{
"epoch": 2.08,
"learning_rate": 6.96969696969697e-05,
"loss": 1.1007,
"step": 138
},
{
"epoch": 2.1,
"learning_rate": 7.020202020202021e-05,
"loss": 1.2377,
"step": 139
},
{
"epoch": 2.11,
"learning_rate": 7.07070707070707e-05,
"loss": 1.0448,
"step": 140
},
{
"epoch": 2.13,
"learning_rate": 7.121212121212121e-05,
"loss": 1.0147,
"step": 141
},
{
"epoch": 2.14,
"learning_rate": 7.171717171717171e-05,
"loss": 1.1971,
"step": 142
},
{
"epoch": 2.16,
"learning_rate": 7.222222222222222e-05,
"loss": 1.0587,
"step": 143
},
{
"epoch": 2.17,
"learning_rate": 7.272727272727273e-05,
"loss": 1.0985,
"step": 144
},
{
"epoch": 2.19,
"learning_rate": 7.323232323232324e-05,
"loss": 1.2073,
"step": 145
},
{
"epoch": 2.2,
"learning_rate": 7.373737373737373e-05,
"loss": 1.1526,
"step": 146
},
{
"epoch": 2.22,
"learning_rate": 7.424242424242424e-05,
"loss": 1.0646,
"step": 147
},
{
"epoch": 2.23,
"learning_rate": 7.474747474747475e-05,
"loss": 1.1812,
"step": 148
},
{
"epoch": 2.25,
"learning_rate": 7.525252525252525e-05,
"loss": 1.0482,
"step": 149
},
{
"epoch": 2.26,
"learning_rate": 7.575757575757576e-05,
"loss": 1.1301,
"step": 150
},
{
"epoch": 2.28,
"learning_rate": 7.626262626262627e-05,
"loss": 0.9766,
"step": 151
},
{
"epoch": 2.29,
"learning_rate": 7.676767676767676e-05,
"loss": 1.1234,
"step": 152
},
{
"epoch": 2.31,
"learning_rate": 7.727272727272727e-05,
"loss": 1.0825,
"step": 153
},
{
"epoch": 2.32,
"learning_rate": 7.777777777777778e-05,
"loss": 1.1252,
"step": 154
},
{
"epoch": 2.34,
"learning_rate": 7.828282828282829e-05,
"loss": 1.0601,
"step": 155
},
{
"epoch": 2.35,
"learning_rate": 7.878787878787879e-05,
"loss": 1.132,
"step": 156
},
{
"epoch": 2.37,
"learning_rate": 7.92929292929293e-05,
"loss": 1.1978,
"step": 157
},
{
"epoch": 2.38,
"learning_rate": 7.97979797979798e-05,
"loss": 1.0783,
"step": 158
},
{
"epoch": 2.4,
"learning_rate": 8.03030303030303e-05,
"loss": 1.1149,
"step": 159
},
{
"epoch": 2.41,
"learning_rate": 8.080808080808081e-05,
"loss": 1.0897,
"step": 160
},
{
"epoch": 2.43,
"learning_rate": 8.131313131313132e-05,
"loss": 1.1671,
"step": 161
},
{
"epoch": 2.44,
"learning_rate": 8.181818181818183e-05,
"loss": 0.9917,
"step": 162
},
{
"epoch": 2.46,
"learning_rate": 8.232323232323233e-05,
"loss": 1.0764,
"step": 163
},
{
"epoch": 2.47,
"learning_rate": 8.282828282828283e-05,
"loss": 1.2091,
"step": 164
},
{
"epoch": 2.49,
"learning_rate": 8.333333333333334e-05,
"loss": 0.9721,
"step": 165
},
{
"epoch": 2.5,
"learning_rate": 8.383838383838384e-05,
"loss": 1.0641,
"step": 166
},
{
"epoch": 2.52,
"learning_rate": 8.434343434343435e-05,
"loss": 1.1268,
"step": 167
},
{
"epoch": 2.53,
"learning_rate": 8.484848484848486e-05,
"loss": 1.1053,
"step": 168
},
{
"epoch": 2.55,
"learning_rate": 8.535353535353535e-05,
"loss": 0.9693,
"step": 169
},
{
"epoch": 2.56,
"learning_rate": 8.585858585858586e-05,
"loss": 1.2105,
"step": 170
},
{
"epoch": 2.58,
"learning_rate": 8.636363636363637e-05,
"loss": 1.1085,
"step": 171
},
{
"epoch": 2.59,
"learning_rate": 8.686868686868688e-05,
"loss": 1.102,
"step": 172
},
{
"epoch": 2.61,
"learning_rate": 8.737373737373738e-05,
"loss": 1.1063,
"step": 173
},
{
"epoch": 2.62,
"learning_rate": 8.787878787878789e-05,
"loss": 1.0495,
"step": 174
},
{
"epoch": 2.64,
"learning_rate": 8.83838383838384e-05,
"loss": 1.0666,
"step": 175
},
{
"epoch": 2.65,
"learning_rate": 8.888888888888889e-05,
"loss": 1.0146,
"step": 176
},
{
"epoch": 2.67,
"learning_rate": 8.93939393939394e-05,
"loss": 1.0375,
"step": 177
},
{
"epoch": 2.68,
"learning_rate": 8.98989898989899e-05,
"loss": 1.0957,
"step": 178
},
{
"epoch": 2.7,
"learning_rate": 9.040404040404041e-05,
"loss": 0.9608,
"step": 179
},
{
"epoch": 2.71,
"learning_rate": 9.090909090909092e-05,
"loss": 1.1103,
"step": 180
},
{
"epoch": 2.73,
"learning_rate": 9.141414141414141e-05,
"loss": 1.0886,
"step": 181
},
{
"epoch": 2.75,
"learning_rate": 9.191919191919192e-05,
"loss": 1.0851,
"step": 182
},
{
"epoch": 2.76,
"learning_rate": 9.242424242424242e-05,
"loss": 1.0472,
"step": 183
},
{
"epoch": 2.78,
"learning_rate": 9.292929292929293e-05,
"loss": 1.1018,
"step": 184
},
{
"epoch": 2.79,
"learning_rate": 9.343434343434344e-05,
"loss": 1.1315,
"step": 185
},
{
"epoch": 2.81,
"learning_rate": 9.393939393939395e-05,
"loss": 1.1845,
"step": 186
},
{
"epoch": 2.82,
"learning_rate": 9.444444444444444e-05,
"loss": 1.0339,
"step": 187
},
{
"epoch": 2.84,
"learning_rate": 9.494949494949495e-05,
"loss": 1.054,
"step": 188
},
{
"epoch": 2.85,
"learning_rate": 9.545454545454546e-05,
"loss": 0.9609,
"step": 189
},
{
"epoch": 2.87,
"learning_rate": 9.595959595959596e-05,
"loss": 1.2661,
"step": 190
},
{
"epoch": 2.88,
"learning_rate": 9.646464646464647e-05,
"loss": 1.0543,
"step": 191
},
{
"epoch": 2.9,
"learning_rate": 9.696969696969698e-05,
"loss": 1.0177,
"step": 192
},
{
"epoch": 2.91,
"learning_rate": 9.747474747474747e-05,
"loss": 1.0964,
"step": 193
},
{
"epoch": 2.93,
"learning_rate": 9.797979797979798e-05,
"loss": 1.2337,
"step": 194
},
{
"epoch": 2.94,
"learning_rate": 9.848484848484849e-05,
"loss": 1.0947,
"step": 195
},
{
"epoch": 2.96,
"learning_rate": 9.8989898989899e-05,
"loss": 1.2549,
"step": 196
},
{
"epoch": 2.97,
"learning_rate": 9.94949494949495e-05,
"loss": 1.1435,
"step": 197
},
{
"epoch": 2.99,
"learning_rate": 0.0001,
"loss": 0.9866,
"step": 198
},
{
"epoch": 3.0,
"learning_rate": 0.0001005050505050505,
"loss": 1.1122,
"step": 199
},
{
"epoch": 3.02,
"learning_rate": 0.00010101010101010102,
"loss": 0.9689,
"step": 200
},
{
"epoch": 3.03,
"learning_rate": 0.00010151515151515152,
"loss": 0.9857,
"step": 201
},
{
"epoch": 3.05,
"learning_rate": 0.00010202020202020202,
"loss": 1.0618,
"step": 202
},
{
"epoch": 3.06,
"learning_rate": 0.00010252525252525254,
"loss": 0.9173,
"step": 203
},
{
"epoch": 3.08,
"learning_rate": 0.00010303030303030303,
"loss": 1.0439,
"step": 204
},
{
"epoch": 3.09,
"learning_rate": 0.00010353535353535353,
"loss": 0.9341,
"step": 205
},
{
"epoch": 3.11,
"learning_rate": 0.00010404040404040405,
"loss": 0.9435,
"step": 206
},
{
"epoch": 3.12,
"learning_rate": 0.00010454545454545455,
"loss": 0.9463,
"step": 207
},
{
"epoch": 3.14,
"learning_rate": 0.00010505050505050507,
"loss": 0.9608,
"step": 208
},
{
"epoch": 3.15,
"learning_rate": 0.00010555555555555557,
"loss": 0.9932,
"step": 209
},
{
"epoch": 3.17,
"learning_rate": 0.00010606060606060606,
"loss": 1.0082,
"step": 210
},
{
"epoch": 3.18,
"learning_rate": 0.00010656565656565659,
"loss": 0.9094,
"step": 211
},
{
"epoch": 3.2,
"learning_rate": 0.00010707070707070708,
"loss": 0.926,
"step": 212
},
{
"epoch": 3.21,
"learning_rate": 0.00010757575757575758,
"loss": 0.9295,
"step": 213
},
{
"epoch": 3.23,
"learning_rate": 0.00010808080808080809,
"loss": 0.8823,
"step": 214
},
{
"epoch": 3.24,
"learning_rate": 0.0001085858585858586,
"loss": 0.984,
"step": 215
},
{
"epoch": 3.26,
"learning_rate": 0.00010909090909090909,
"loss": 1.0788,
"step": 216
},
{
"epoch": 3.27,
"learning_rate": 0.0001095959595959596,
"loss": 1.0356,
"step": 217
},
{
"epoch": 3.29,
"learning_rate": 0.00011010101010101011,
"loss": 0.9449,
"step": 218
},
{
"epoch": 3.3,
"learning_rate": 0.00011060606060606061,
"loss": 1.0918,
"step": 219
},
{
"epoch": 3.32,
"learning_rate": 0.00011111111111111112,
"loss": 0.9733,
"step": 220
},
{
"epoch": 3.33,
"learning_rate": 0.00011161616161616161,
"loss": 1.0341,
"step": 221
},
{
"epoch": 3.35,
"learning_rate": 0.00011212121212121212,
"loss": 0.981,
"step": 222
},
{
"epoch": 3.36,
"learning_rate": 0.00011262626262626263,
"loss": 0.9898,
"step": 223
},
{
"epoch": 3.38,
"learning_rate": 0.00011313131313131313,
"loss": 0.9891,
"step": 224
},
{
"epoch": 3.39,
"learning_rate": 0.00011363636363636365,
"loss": 0.9681,
"step": 225
},
{
"epoch": 3.41,
"learning_rate": 0.00011414141414141415,
"loss": 0.9518,
"step": 226
},
{
"epoch": 3.42,
"learning_rate": 0.00011464646464646464,
"loss": 0.9959,
"step": 227
},
{
"epoch": 3.44,
"learning_rate": 0.00011515151515151516,
"loss": 0.9998,
"step": 228
},
{
"epoch": 3.45,
"learning_rate": 0.00011565656565656566,
"loss": 0.8444,
"step": 229
},
{
"epoch": 3.47,
"learning_rate": 0.00011616161616161616,
"loss": 1.0036,
"step": 230
},
{
"epoch": 3.48,
"learning_rate": 0.00011666666666666668,
"loss": 0.9296,
"step": 231
},
{
"epoch": 3.5,
"learning_rate": 0.00011717171717171717,
"loss": 0.9785,
"step": 232
},
{
"epoch": 3.51,
"learning_rate": 0.00011767676767676767,
"loss": 0.9004,
"step": 233
},
{
"epoch": 3.53,
"learning_rate": 0.0001181818181818182,
"loss": 1.0123,
"step": 234
},
{
"epoch": 3.54,
"learning_rate": 0.00011868686868686869,
"loss": 1.1656,
"step": 235
},
{
"epoch": 3.56,
"learning_rate": 0.00011919191919191919,
"loss": 0.9841,
"step": 236
},
{
"epoch": 3.57,
"learning_rate": 0.00011969696969696971,
"loss": 0.8851,
"step": 237
},
{
"epoch": 3.59,
"learning_rate": 0.0001202020202020202,
"loss": 1.0571,
"step": 238
},
{
"epoch": 3.6,
"learning_rate": 0.0001207070707070707,
"loss": 0.9695,
"step": 239
},
{
"epoch": 3.62,
"learning_rate": 0.00012121212121212122,
"loss": 0.7582,
"step": 240
},
{
"epoch": 3.63,
"learning_rate": 0.00012171717171717172,
"loss": 1.0994,
"step": 241
},
{
"epoch": 3.65,
"learning_rate": 0.00012222222222222224,
"loss": 0.9242,
"step": 242
},
{
"epoch": 3.67,
"learning_rate": 0.00012272727272727272,
"loss": 0.96,
"step": 243
},
{
"epoch": 3.68,
"learning_rate": 0.00012323232323232323,
"loss": 1.0824,
"step": 244
},
{
"epoch": 3.7,
"learning_rate": 0.00012373737373737374,
"loss": 1.0037,
"step": 245
},
{
"epoch": 3.71,
"learning_rate": 0.00012424242424242425,
"loss": 0.8877,
"step": 246
},
{
"epoch": 3.73,
"learning_rate": 0.00012474747474747473,
"loss": 0.9342,
"step": 247
},
{
"epoch": 3.74,
"learning_rate": 0.00012525252525252527,
"loss": 1.0559,
"step": 248
},
{
"epoch": 3.76,
"learning_rate": 0.00012575757575757575,
"loss": 1.0001,
"step": 249
},
{
"epoch": 3.77,
"learning_rate": 0.00012626262626262626,
"loss": 0.873,
"step": 250
},
{
"epoch": 3.79,
"learning_rate": 0.00012676767676767677,
"loss": 0.8952,
"step": 251
},
{
"epoch": 3.8,
"learning_rate": 0.00012727272727272728,
"loss": 1.0204,
"step": 252
},
{
"epoch": 3.82,
"learning_rate": 0.00012777777777777776,
"loss": 0.9089,
"step": 253
},
{
"epoch": 3.83,
"learning_rate": 0.0001282828282828283,
"loss": 0.959,
"step": 254
},
{
"epoch": 3.85,
"learning_rate": 0.00012878787878787878,
"loss": 1.0861,
"step": 255
},
{
"epoch": 3.86,
"learning_rate": 0.00012929292929292932,
"loss": 0.9781,
"step": 256
},
{
"epoch": 3.88,
"learning_rate": 0.0001297979797979798,
"loss": 1.0855,
"step": 257
},
{
"epoch": 3.89,
"learning_rate": 0.0001303030303030303,
"loss": 0.9624,
"step": 258
},
{
"epoch": 3.91,
"learning_rate": 0.00013080808080808082,
"loss": 1.0674,
"step": 259
},
{
"epoch": 3.92,
"learning_rate": 0.00013131313131313133,
"loss": 1.0015,
"step": 260
},
{
"epoch": 3.94,
"learning_rate": 0.0001318181818181818,
"loss": 1.0056,
"step": 261
},
{
"epoch": 3.95,
"learning_rate": 0.00013232323232323235,
"loss": 0.9579,
"step": 262
},
{
"epoch": 3.97,
"learning_rate": 0.00013282828282828283,
"loss": 1.0703,
"step": 263
},
{
"epoch": 3.98,
"learning_rate": 0.00013333333333333334,
"loss": 1.1343,
"step": 264
},
{
"epoch": 4.0,
"learning_rate": 0.00013383838383838385,
"loss": 0.9392,
"step": 265
},
{
"epoch": 4.01,
"learning_rate": 0.00013434343434343436,
"loss": 0.9109,
"step": 266
},
{
"epoch": 4.03,
"learning_rate": 0.00013484848484848484,
"loss": 0.8773,
"step": 267
},
{
"epoch": 4.04,
"learning_rate": 0.00013535353535353538,
"loss": 0.8556,
"step": 268
},
{
"epoch": 4.06,
"learning_rate": 0.00013585858585858586,
"loss": 0.7971,
"step": 269
},
{
"epoch": 4.07,
"learning_rate": 0.00013636363636363637,
"loss": 0.7486,
"step": 270
},
{
"epoch": 4.09,
"learning_rate": 0.00013686868686868688,
"loss": 0.7878,
"step": 271
},
{
"epoch": 4.1,
"learning_rate": 0.0001373737373737374,
"loss": 0.735,
"step": 272
},
{
"epoch": 4.12,
"learning_rate": 0.0001378787878787879,
"loss": 0.81,
"step": 273
},
{
"epoch": 4.13,
"learning_rate": 0.0001383838383838384,
"loss": 0.8716,
"step": 274
},
{
"epoch": 4.15,
"learning_rate": 0.0001388888888888889,
"loss": 0.7618,
"step": 275
},
{
"epoch": 4.16,
"learning_rate": 0.0001393939393939394,
"loss": 0.8475,
"step": 276
},
{
"epoch": 4.18,
"learning_rate": 0.0001398989898989899,
"loss": 0.8456,
"step": 277
},
{
"epoch": 4.19,
"learning_rate": 0.00014040404040404042,
"loss": 0.8424,
"step": 278
},
{
"epoch": 4.21,
"learning_rate": 0.00014090909090909093,
"loss": 0.9333,
"step": 279
},
{
"epoch": 4.22,
"learning_rate": 0.0001414141414141414,
"loss": 0.8743,
"step": 280
},
{
"epoch": 4.24,
"learning_rate": 0.00014191919191919192,
"loss": 0.9118,
"step": 281
},
{
"epoch": 4.25,
"learning_rate": 0.00014242424242424243,
"loss": 0.8716,
"step": 282
},
{
"epoch": 4.27,
"learning_rate": 0.00014292929292929294,
"loss": 0.8154,
"step": 283
},
{
"epoch": 4.28,
"learning_rate": 0.00014343434343434342,
"loss": 0.7588,
"step": 284
},
{
"epoch": 4.3,
"learning_rate": 0.00014393939393939396,
"loss": 0.8305,
"step": 285
},
{
"epoch": 4.31,
"learning_rate": 0.00014444444444444444,
"loss": 0.7744,
"step": 286
},
{
"epoch": 4.33,
"learning_rate": 0.00014494949494949495,
"loss": 0.9332,
"step": 287
},
{
"epoch": 4.34,
"learning_rate": 0.00014545454545454546,
"loss": 0.8516,
"step": 288
},
{
"epoch": 4.36,
"learning_rate": 0.00014595959595959597,
"loss": 0.9717,
"step": 289
},
{
"epoch": 4.37,
"learning_rate": 0.00014646464646464648,
"loss": 0.8857,
"step": 290
},
{
"epoch": 4.39,
"learning_rate": 0.00014696969696969698,
"loss": 0.8565,
"step": 291
},
{
"epoch": 4.4,
"learning_rate": 0.00014747474747474747,
"loss": 0.8423,
"step": 292
},
{
"epoch": 4.42,
"learning_rate": 0.000147979797979798,
"loss": 0.7805,
"step": 293
},
{
"epoch": 4.43,
"learning_rate": 0.00014848484848484849,
"loss": 0.7872,
"step": 294
},
{
"epoch": 4.45,
"learning_rate": 0.000148989898989899,
"loss": 0.7916,
"step": 295
},
{
"epoch": 4.46,
"learning_rate": 0.0001494949494949495,
"loss": 0.8366,
"step": 296
},
{
"epoch": 4.48,
"learning_rate": 0.00015000000000000001,
"loss": 0.8232,
"step": 297
},
{
"epoch": 4.49,
"learning_rate": 0.0001505050505050505,
"loss": 0.849,
"step": 298
},
{
"epoch": 4.51,
"learning_rate": 0.00015101010101010103,
"loss": 0.8255,
"step": 299
},
{
"epoch": 4.52,
"learning_rate": 0.00015151515151515152,
"loss": 0.8112,
"step": 300
},
{
"epoch": 4.54,
"learning_rate": 0.00015202020202020202,
"loss": 0.8022,
"step": 301
},
{
"epoch": 4.56,
"learning_rate": 0.00015252525252525253,
"loss": 0.7779,
"step": 302
},
{
"epoch": 4.57,
"learning_rate": 0.00015303030303030304,
"loss": 0.8785,
"step": 303
},
{
"epoch": 4.59,
"learning_rate": 0.00015353535353535353,
"loss": 0.8064,
"step": 304
},
{
"epoch": 4.6,
"learning_rate": 0.00015404040404040406,
"loss": 0.8665,
"step": 305
},
{
"epoch": 4.62,
"learning_rate": 0.00015454545454545454,
"loss": 0.8263,
"step": 306
},
{
"epoch": 4.63,
"learning_rate": 0.00015505050505050508,
"loss": 0.8513,
"step": 307
},
{
"epoch": 4.65,
"learning_rate": 0.00015555555555555556,
"loss": 0.7776,
"step": 308
},
{
"epoch": 4.66,
"learning_rate": 0.00015606060606060607,
"loss": 0.8738,
"step": 309
},
{
"epoch": 4.68,
"learning_rate": 0.00015656565656565658,
"loss": 0.8859,
"step": 310
},
{
"epoch": 4.69,
"learning_rate": 0.0001570707070707071,
"loss": 0.8391,
"step": 311
},
{
"epoch": 4.71,
"learning_rate": 0.00015757575757575757,
"loss": 0.7208,
"step": 312
},
{
"epoch": 4.72,
"learning_rate": 0.00015808080808080808,
"loss": 0.8693,
"step": 313
},
{
"epoch": 4.74,
"learning_rate": 0.0001585858585858586,
"loss": 0.9205,
"step": 314
},
{
"epoch": 4.75,
"learning_rate": 0.0001590909090909091,
"loss": 0.8755,
"step": 315
},
{
"epoch": 4.77,
"learning_rate": 0.0001595959595959596,
"loss": 0.8039,
"step": 316
},
{
"epoch": 4.78,
"learning_rate": 0.00016010101010101012,
"loss": 0.7777,
"step": 317
},
{
"epoch": 4.8,
"learning_rate": 0.0001606060606060606,
"loss": 0.7352,
"step": 318
},
{
"epoch": 4.81,
"learning_rate": 0.0001611111111111111,
"loss": 0.8669,
"step": 319
},
{
"epoch": 4.83,
"learning_rate": 0.00016161616161616162,
"loss": 0.871,
"step": 320
},
{
"epoch": 4.84,
"learning_rate": 0.00016212121212121213,
"loss": 0.9014,
"step": 321
},
{
"epoch": 4.86,
"learning_rate": 0.00016262626262626264,
"loss": 0.973,
"step": 322
},
{
"epoch": 4.87,
"learning_rate": 0.00016313131313131312,
"loss": 0.8274,
"step": 323
},
{
"epoch": 4.89,
"learning_rate": 0.00016363636363636366,
"loss": 0.8801,
"step": 324
},
{
"epoch": 4.9,
"learning_rate": 0.00016414141414141414,
"loss": 0.9279,
"step": 325
},
{
"epoch": 4.92,
"learning_rate": 0.00016464646464646465,
"loss": 0.8946,
"step": 326
},
{
"epoch": 4.93,
"learning_rate": 0.00016515151515151516,
"loss": 0.8768,
"step": 327
},
{
"epoch": 4.95,
"learning_rate": 0.00016565656565656567,
"loss": 0.9011,
"step": 328
},
{
"epoch": 4.96,
"learning_rate": 0.00016616161616161615,
"loss": 0.8496,
"step": 329
},
{
"epoch": 4.98,
"learning_rate": 0.0001666666666666667,
"loss": 0.7764,
"step": 330
},
{
"epoch": 4.99,
"learning_rate": 0.00016717171717171717,
"loss": 0.9669,
"step": 331
},
{
"epoch": 5.01,
"learning_rate": 0.00016767676767676768,
"loss": 0.8556,
"step": 332
},
{
"epoch": 5.02,
"learning_rate": 0.0001681818181818182,
"loss": 0.6294,
"step": 333
},
{
"epoch": 5.04,
"learning_rate": 0.0001686868686868687,
"loss": 0.5905,
"step": 334
},
{
"epoch": 5.05,
"learning_rate": 0.00016919191919191918,
"loss": 0.6959,
"step": 335
},
{
"epoch": 5.07,
"learning_rate": 0.00016969696969696972,
"loss": 0.7869,
"step": 336
},
{
"epoch": 5.08,
"learning_rate": 0.0001702020202020202,
"loss": 0.6294,
"step": 337
},
{
"epoch": 5.1,
"learning_rate": 0.0001707070707070707,
"loss": 0.6231,
"step": 338
},
{
"epoch": 5.11,
"learning_rate": 0.00017121212121212122,
"loss": 0.6398,
"step": 339
},
{
"epoch": 5.13,
"learning_rate": 0.00017171717171717173,
"loss": 0.6709,
"step": 340
},
{
"epoch": 5.14,
"learning_rate": 0.00017222222222222224,
"loss": 0.6276,
"step": 341
},
{
"epoch": 5.16,
"learning_rate": 0.00017272727272727275,
"loss": 0.7472,
"step": 342
},
{
"epoch": 5.17,
"learning_rate": 0.00017323232323232323,
"loss": 0.6593,
"step": 343
},
{
"epoch": 5.19,
"learning_rate": 0.00017373737373737377,
"loss": 0.6895,
"step": 344
},
{
"epoch": 5.2,
"learning_rate": 0.00017424242424242425,
"loss": 0.6663,
"step": 345
},
{
"epoch": 5.22,
"learning_rate": 0.00017474747474747476,
"loss": 0.7473,
"step": 346
},
{
"epoch": 5.23,
"learning_rate": 0.00017525252525252527,
"loss": 0.6482,
"step": 347
},
{
"epoch": 5.25,
"learning_rate": 0.00017575757575757578,
"loss": 0.7009,
"step": 348
},
{
"epoch": 5.26,
"learning_rate": 0.00017626262626262626,
"loss": 0.7085,
"step": 349
},
{
"epoch": 5.28,
"learning_rate": 0.0001767676767676768,
"loss": 0.6697,
"step": 350
},
{
"epoch": 5.29,
"learning_rate": 0.00017727272727272728,
"loss": 0.6169,
"step": 351
},
{
"epoch": 5.31,
"learning_rate": 0.00017777777777777779,
"loss": 0.6844,
"step": 352
},
{
"epoch": 5.32,
"learning_rate": 0.0001782828282828283,
"loss": 0.8004,
"step": 353
},
{
"epoch": 5.34,
"learning_rate": 0.0001787878787878788,
"loss": 0.7215,
"step": 354
},
{
"epoch": 5.35,
"learning_rate": 0.00017929292929292931,
"loss": 0.6962,
"step": 355
},
{
"epoch": 5.37,
"learning_rate": 0.0001797979797979798,
"loss": 0.6727,
"step": 356
},
{
"epoch": 5.38,
"learning_rate": 0.0001803030303030303,
"loss": 0.6453,
"step": 357
},
{
"epoch": 5.4,
"learning_rate": 0.00018080808080808082,
"loss": 0.7153,
"step": 358
},
{
"epoch": 5.41,
"learning_rate": 0.00018131313131313132,
"loss": 0.6596,
"step": 359
},
{
"epoch": 5.43,
"learning_rate": 0.00018181818181818183,
"loss": 0.667,
"step": 360
},
{
"epoch": 5.44,
"learning_rate": 0.00018232323232323234,
"loss": 0.859,
"step": 361
},
{
"epoch": 5.46,
"learning_rate": 0.00018282828282828283,
"loss": 0.7127,
"step": 362
},
{
"epoch": 5.48,
"learning_rate": 0.00018333333333333334,
"loss": 0.7715,
"step": 363
},
{
"epoch": 5.49,
"learning_rate": 0.00018383838383838384,
"loss": 0.674,
"step": 364
},
{
"epoch": 5.51,
"learning_rate": 0.00018434343434343435,
"loss": 0.716,
"step": 365
},
{
"epoch": 5.52,
"learning_rate": 0.00018484848484848484,
"loss": 0.692,
"step": 366
},
{
"epoch": 5.54,
"learning_rate": 0.00018535353535353537,
"loss": 0.7767,
"step": 367
},
{
"epoch": 5.55,
"learning_rate": 0.00018585858585858586,
"loss": 0.7128,
"step": 368
},
{
"epoch": 5.57,
"learning_rate": 0.00018636363636363636,
"loss": 0.7044,
"step": 369
},
{
"epoch": 5.58,
"learning_rate": 0.00018686868686868687,
"loss": 0.7437,
"step": 370
},
{
"epoch": 5.6,
"learning_rate": 0.00018737373737373738,
"loss": 0.8031,
"step": 371
},
{
"epoch": 5.61,
"learning_rate": 0.0001878787878787879,
"loss": 0.7541,
"step": 372
},
{
"epoch": 5.63,
"learning_rate": 0.0001883838383838384,
"loss": 0.792,
"step": 373
},
{
"epoch": 5.64,
"learning_rate": 0.00018888888888888888,
"loss": 0.7628,
"step": 374
},
{
"epoch": 5.66,
"learning_rate": 0.00018939393939393942,
"loss": 0.7543,
"step": 375
},
{
"epoch": 5.67,
"learning_rate": 0.0001898989898989899,
"loss": 0.7325,
"step": 376
},
{
"epoch": 5.69,
"learning_rate": 0.0001904040404040404,
"loss": 0.6901,
"step": 377
},
{
"epoch": 5.7,
"learning_rate": 0.00019090909090909092,
"loss": 0.6723,
"step": 378
},
{
"epoch": 5.72,
"learning_rate": 0.00019141414141414143,
"loss": 0.7287,
"step": 379
},
{
"epoch": 5.73,
"learning_rate": 0.00019191919191919191,
"loss": 0.7178,
"step": 380
},
{
"epoch": 5.75,
"learning_rate": 0.00019242424242424245,
"loss": 0.8214,
"step": 381
},
{
"epoch": 5.76,
"learning_rate": 0.00019292929292929293,
"loss": 0.6586,
"step": 382
},
{
"epoch": 5.78,
"learning_rate": 0.00019343434343434344,
"loss": 0.7933,
"step": 383
},
{
"epoch": 5.79,
"learning_rate": 0.00019393939393939395,
"loss": 0.7253,
"step": 384
},
{
"epoch": 5.81,
"learning_rate": 0.00019444444444444446,
"loss": 0.7694,
"step": 385
},
{
"epoch": 5.82,
"learning_rate": 0.00019494949494949494,
"loss": 0.795,
"step": 386
},
{
"epoch": 5.84,
"learning_rate": 0.00019545454545454548,
"loss": 0.7402,
"step": 387
},
{
"epoch": 5.85,
"learning_rate": 0.00019595959595959596,
"loss": 0.7058,
"step": 388
},
{
"epoch": 5.87,
"learning_rate": 0.0001964646464646465,
"loss": 0.786,
"step": 389
},
{
"epoch": 5.88,
"learning_rate": 0.00019696969696969698,
"loss": 0.7699,
"step": 390
},
{
"epoch": 5.9,
"learning_rate": 0.0001974747474747475,
"loss": 0.8197,
"step": 391
},
{
"epoch": 5.91,
"learning_rate": 0.000197979797979798,
"loss": 0.8621,
"step": 392
},
{
"epoch": 5.93,
"learning_rate": 0.0001984848484848485,
"loss": 0.7168,
"step": 393
},
{
"epoch": 5.94,
"learning_rate": 0.000198989898989899,
"loss": 0.7006,
"step": 394
},
{
"epoch": 5.96,
"learning_rate": 0.0001994949494949495,
"loss": 0.765,
"step": 395
},
{
"epoch": 5.97,
"learning_rate": 0.0002,
"loss": 0.7282,
"step": 396
},
{
"epoch": 5.99,
"learning_rate": 0.00019999291961666908,
"loss": 0.8135,
"step": 397
},
{
"epoch": 6.0,
"learning_rate": 0.0001999716794693129,
"loss": 0.6931,
"step": 398
},
{
"epoch": 6.02,
"learning_rate": 0.0001999362825656992,
"loss": 0.5689,
"step": 399
},
{
"epoch": 6.03,
"learning_rate": 0.0001998867339183008,
"loss": 0.5693,
"step": 400
},
{
"epoch": 6.05,
"learning_rate": 0.00019982304054358614,
"loss": 0.5369,
"step": 401
},
{
"epoch": 6.06,
"learning_rate": 0.00019974521146102537,
"loss": 0.4887,
"step": 402
},
{
"epoch": 6.08,
"learning_rate": 0.00019965325769181325,
"loss": 0.5723,
"step": 403
},
{
"epoch": 6.09,
"learning_rate": 0.00019954719225730847,
"loss": 0.6284,
"step": 404
},
{
"epoch": 6.11,
"learning_rate": 0.00019942703017718975,
"loss": 0.5439,
"step": 405
},
{
"epoch": 6.12,
"learning_rate": 0.00019929278846732884,
"loss": 0.5521,
"step": 406
},
{
"epoch": 6.14,
"learning_rate": 0.00019914448613738106,
"loss": 0.5535,
"step": 407
},
{
"epoch": 6.15,
"learning_rate": 0.0001989821441880933,
"loss": 0.4884,
"step": 408
},
{
"epoch": 6.17,
"learning_rate": 0.00019880578560833016,
"loss": 0.549,
"step": 409
},
{
"epoch": 6.18,
"learning_rate": 0.00019861543537181867,
"loss": 0.5644,
"step": 410
},
{
"epoch": 6.2,
"learning_rate": 0.0001984111204336116,
"loss": 0.5865,
"step": 411
},
{
"epoch": 6.21,
"learning_rate": 0.00019819286972627066,
"loss": 0.5844,
"step": 412
},
{
"epoch": 6.23,
"learning_rate": 0.00019796071415576925,
"loss": 0.6734,
"step": 413
},
{
"epoch": 6.24,
"learning_rate": 0.00019771468659711595,
"loss": 0.5724,
"step": 414
},
{
"epoch": 6.26,
"learning_rate": 0.0001974548218896993,
"loss": 0.5014,
"step": 415
},
{
"epoch": 6.27,
"learning_rate": 0.00019718115683235417,
"loss": 0.5318,
"step": 416
},
{
"epoch": 6.29,
"learning_rate": 0.00019689373017815073,
"loss": 0.574,
"step": 417
},
{
"epoch": 6.3,
"learning_rate": 0.00019659258262890683,
"loss": 0.4451,
"step": 418
},
{
"epoch": 6.32,
"learning_rate": 0.0001962777568294242,
"loss": 0.5648,
"step": 419
},
{
"epoch": 6.33,
"learning_rate": 0.00019594929736144976,
"loss": 0.5006,
"step": 420
},
{
"epoch": 6.35,
"learning_rate": 0.00019560725073736226,
"loss": 0.6189,
"step": 421
},
{
"epoch": 6.37,
"learning_rate": 0.00019525166539358606,
"loss": 0.5462,
"step": 422
},
{
"epoch": 6.38,
"learning_rate": 0.00019488259168373197,
"loss": 0.5923,
"step": 423
},
{
"epoch": 6.4,
"learning_rate": 0.00019450008187146684,
"loss": 0.5371,
"step": 424
},
{
"epoch": 6.41,
"learning_rate": 0.00019410419012311268,
"loss": 0.6752,
"step": 425
},
{
"epoch": 6.43,
"learning_rate": 0.0001936949724999762,
"loss": 0.6184,
"step": 426
},
{
"epoch": 6.44,
"learning_rate": 0.0001932724869504101,
"loss": 0.6163,
"step": 427
},
{
"epoch": 6.46,
"learning_rate": 0.00019283679330160726,
"loss": 0.5992,
"step": 428
},
{
"epoch": 6.47,
"learning_rate": 0.0001923879532511287,
"loss": 0.6189,
"step": 429
},
{
"epoch": 6.49,
"learning_rate": 0.00019192603035816656,
"loss": 0.6301,
"step": 430
},
{
"epoch": 6.5,
"learning_rate": 0.00019145109003454396,
"loss": 0.6293,
"step": 431
},
{
"epoch": 6.52,
"learning_rate": 0.00019096319953545185,
"loss": 0.64,
"step": 432
},
{
"epoch": 6.53,
"learning_rate": 0.00019046242794992538,
"loss": 0.6398,
"step": 433
},
{
"epoch": 6.55,
"learning_rate": 0.00018994884619106031,
"loss": 0.593,
"step": 434
},
{
"epoch": 6.56,
"learning_rate": 0.00018942252698597113,
"loss": 0.6103,
"step": 435
},
{
"epoch": 6.58,
"learning_rate": 0.00018888354486549237,
"loss": 0.6142,
"step": 436
},
{
"epoch": 6.59,
"learning_rate": 0.0001883319761536244,
"loss": 0.583,
"step": 437
},
{
"epoch": 6.61,
"learning_rate": 0.00018776789895672558,
"loss": 0.5881,
"step": 438
},
{
"epoch": 6.62,
"learning_rate": 0.00018719139315245148,
"loss": 0.5929,
"step": 439
},
{
"epoch": 6.64,
"learning_rate": 0.00018660254037844388,
"loss": 0.6417,
"step": 440
},
{
"epoch": 6.65,
"learning_rate": 0.00018600142402077006,
"loss": 0.5681,
"step": 441
},
{
"epoch": 6.67,
"learning_rate": 0.0001853881292021148,
"loss": 0.627,
"step": 442
},
{
"epoch": 6.68,
"learning_rate": 0.00018476274276972636,
"loss": 0.5517,
"step": 443
},
{
"epoch": 6.7,
"learning_rate": 0.00018412535328311814,
"loss": 0.5575,
"step": 444
},
{
"epoch": 6.71,
"learning_rate": 0.00018347605100152802,
"loss": 0.7175,
"step": 445
},
{
"epoch": 6.73,
"learning_rate": 0.00018281492787113708,
"loss": 0.6409,
"step": 446
},
{
"epoch": 6.74,
"learning_rate": 0.00018214207751204918,
"loss": 0.6176,
"step": 447
},
{
"epoch": 6.76,
"learning_rate": 0.00018145759520503358,
"loss": 0.6328,
"step": 448
},
{
"epoch": 6.77,
"learning_rate": 0.00018076157787803268,
"loss": 0.6437,
"step": 449
},
{
"epoch": 6.79,
"learning_rate": 0.00018005412409243606,
"loss": 0.6153,
"step": 450
},
{
"epoch": 6.8,
"learning_rate": 0.00017933533402912354,
"loss": 0.639,
"step": 451
},
{
"epoch": 6.82,
"learning_rate": 0.00017860530947427875,
"loss": 0.5672,
"step": 452
},
{
"epoch": 6.83,
"learning_rate": 0.00017786415380497553,
"loss": 0.5955,
"step": 453
},
{
"epoch": 6.85,
"learning_rate": 0.00017711197197453878,
"loss": 0.6168,
"step": 454
},
{
"epoch": 6.86,
"learning_rate": 0.00017634887049768237,
"loss": 0.694,
"step": 455
},
{
"epoch": 6.88,
"learning_rate": 0.00017557495743542585,
"loss": 0.6137,
"step": 456
},
{
"epoch": 6.89,
"learning_rate": 0.0001747903423797921,
"loss": 0.6617,
"step": 457
},
{
"epoch": 6.91,
"learning_rate": 0.0001739951364382884,
"loss": 0.6298,
"step": 458
},
{
"epoch": 6.92,
"learning_rate": 0.00017318945221817255,
"loss": 0.5342,
"step": 459
},
{
"epoch": 6.94,
"learning_rate": 0.00017237340381050703,
"loss": 0.6858,
"step": 460
},
{
"epoch": 6.95,
"learning_rate": 0.00017154710677400265,
"loss": 0.6101,
"step": 461
},
{
"epoch": 6.97,
"learning_rate": 0.00017071067811865476,
"loss": 0.5749,
"step": 462
},
{
"epoch": 6.98,
"learning_rate": 0.00016986423628917346,
"loss": 0.6245,
"step": 463
},
{
"epoch": 7.0,
"learning_rate": 0.00016900790114821122,
"loss": 0.6779,
"step": 464
},
{
"epoch": 7.01,
"learning_rate": 0.00016814179395938913,
"loss": 0.4573,
"step": 465
},
{
"epoch": 7.03,
"learning_rate": 0.00016726603737012529,
"loss": 0.4555,
"step": 466
},
{
"epoch": 7.04,
"learning_rate": 0.00016638075539426677,
"loss": 0.414,
"step": 467
},
{
"epoch": 7.06,
"learning_rate": 0.00016548607339452853,
"loss": 0.3871,
"step": 468
},
{
"epoch": 7.07,
"learning_rate": 0.00016458211806474088,
"loss": 0.5154,
"step": 469
},
{
"epoch": 7.09,
"learning_rate": 0.00016366901741190882,
"loss": 0.3657,
"step": 470
},
{
"epoch": 7.1,
"learning_rate": 0.0001627469007380852,
"loss": 0.4374,
"step": 471
},
{
"epoch": 7.12,
"learning_rate": 0.00016181589862206052,
"loss": 0.4454,
"step": 472
},
{
"epoch": 7.13,
"learning_rate": 0.00016087614290087208,
"loss": 0.4105,
"step": 473
},
{
"epoch": 7.15,
"learning_rate": 0.0001599277666511347,
"loss": 0.4332,
"step": 474
},
{
"epoch": 7.16,
"learning_rate": 0.0001589709041701962,
"loss": 0.4608,
"step": 475
},
{
"epoch": 7.18,
"learning_rate": 0.00015800569095711982,
"loss": 0.4211,
"step": 476
},
{
"epoch": 7.19,
"learning_rate": 0.0001570322636934964,
"loss": 0.4464,
"step": 477
},
{
"epoch": 7.21,
"learning_rate": 0.0001560507602240894,
"loss": 0.4389,
"step": 478
},
{
"epoch": 7.22,
"learning_rate": 0.0001550613195373149,
"loss": 0.4094,
"step": 479
},
{
"epoch": 7.24,
"learning_rate": 0.00015406408174555976,
"loss": 0.4297,
"step": 480
},
{
"epoch": 7.25,
"learning_rate": 0.00015305918806534082,
"loss": 0.3955,
"step": 481
},
{
"epoch": 7.27,
"learning_rate": 0.00015204678079730724,
"loss": 0.3662,
"step": 482
},
{
"epoch": 7.29,
"learning_rate": 0.00015102700330609,
"loss": 0.4057,
"step": 483
},
{
"epoch": 7.3,
"learning_rate": 0.00015000000000000001,
"loss": 0.4313,
"step": 484
},
{
"epoch": 7.32,
"learning_rate": 0.00014896591631057912,
"loss": 0.4156,
"step": 485
},
{
"epoch": 7.33,
"learning_rate": 0.0001479248986720057,
"loss": 0.4419,
"step": 486
},
{
"epoch": 7.35,
"learning_rate": 0.00014687709450035837,
"loss": 0.3956,
"step": 487
},
{
"epoch": 7.36,
"learning_rate": 0.00014582265217274104,
"loss": 0.3943,
"step": 488
},
{
"epoch": 7.38,
"learning_rate": 0.00014476172100627127,
"loss": 0.364,
"step": 489
},
{
"epoch": 7.39,
"learning_rate": 0.00014369445123693596,
"loss": 0.3883,
"step": 490
},
{
"epoch": 7.41,
"learning_rate": 0.00014262099399831683,
"loss": 0.4107,
"step": 491
},
{
"epoch": 7.42,
"learning_rate": 0.00014154150130018866,
"loss": 0.4114,
"step": 492
},
{
"epoch": 7.44,
"learning_rate": 0.0001404561260069935,
"loss": 0.4161,
"step": 493
},
{
"epoch": 7.45,
"learning_rate": 0.00013936502181619416,
"loss": 0.4574,
"step": 494
},
{
"epoch": 7.47,
"learning_rate": 0.000138268343236509,
"loss": 0.404,
"step": 495
},
{
"epoch": 7.48,
"learning_rate": 0.00013716624556603274,
"loss": 0.4327,
"step": 496
},
{
"epoch": 7.5,
"learning_rate": 0.000136058884870245,
"loss": 0.3808,
"step": 497
},
{
"epoch": 7.51,
"learning_rate": 0.00013494641795990986,
"loss": 0.4414,
"step": 498
},
{
"epoch": 7.53,
"learning_rate": 0.00013382900236887075,
"loss": 0.444,
"step": 499
},
{
"epoch": 7.54,
"learning_rate": 0.00013270679633174218,
"loss": 0.4098,
"step": 500
},
{
"epoch": 7.56,
"learning_rate": 0.0001315799587615025,
"loss": 0.5061,
"step": 501
},
{
"epoch": 7.57,
"learning_rate": 0.0001304486492269907,
"loss": 0.4303,
"step": 502
},
{
"epoch": 7.59,
"learning_rate": 0.0001293130279303102,
"loss": 0.4687,
"step": 503
},
{
"epoch": 7.6,
"learning_rate": 0.00012817325568414297,
"loss": 0.4191,
"step": 504
},
{
"epoch": 7.62,
"learning_rate": 0.0001270294938889773,
"loss": 0.4531,
"step": 505
},
{
"epoch": 7.63,
"learning_rate": 0.00012588190451025207,
"loss": 0.4888,
"step": 506
},
{
"epoch": 7.65,
"learning_rate": 0.00012473065005542155,
"loss": 0.4177,
"step": 507
},
{
"epoch": 7.66,
"learning_rate": 0.00012357589355094275,
"loss": 0.5206,
"step": 508
},
{
"epoch": 7.68,
"learning_rate": 0.0001224177985191897,
"loss": 0.453,
"step": 509
},
{
"epoch": 7.69,
"learning_rate": 0.00012125652895529766,
"loss": 0.4442,
"step": 510
},
{
"epoch": 7.71,
"learning_rate": 0.00012009224930393988,
"loss": 0.4883,
"step": 511
},
{
"epoch": 7.72,
"learning_rate": 0.00011892512443604102,
"loss": 0.4499,
"step": 512
},
{
"epoch": 7.74,
"learning_rate": 0.00011775531962543036,
"loss": 0.5023,
"step": 513
},
{
"epoch": 7.75,
"learning_rate": 0.00011658300052543742,
"loss": 0.3726,
"step": 514
},
{
"epoch": 7.77,
"learning_rate": 0.00011540833314543458,
"loss": 0.4412,
"step": 515
},
{
"epoch": 7.78,
"learning_rate": 0.00011423148382732853,
"loss": 0.4233,
"step": 516
},
{
"epoch": 7.8,
"learning_rate": 0.00011305261922200519,
"loss": 0.5323,
"step": 517
},
{
"epoch": 7.81,
"learning_rate": 0.00011187190626573052,
"loss": 0.4949,
"step": 518
},
{
"epoch": 7.83,
"learning_rate": 0.00011068951215651132,
"loss": 0.3815,
"step": 519
},
{
"epoch": 7.84,
"learning_rate": 0.00010950560433041826,
"loss": 0.3834,
"step": 520
},
{
"epoch": 7.86,
"learning_rate": 0.00010832035043787625,
"loss": 0.4408,
"step": 521
},
{
"epoch": 7.87,
"learning_rate": 0.00010713391831992323,
"loss": 0.4821,
"step": 522
},
{
"epoch": 7.89,
"learning_rate": 0.00010594647598444312,
"loss": 0.382,
"step": 523
},
{
"epoch": 7.9,
"learning_rate": 0.00010475819158237425,
"loss": 0.4725,
"step": 524
},
{
"epoch": 7.92,
"learning_rate": 0.00010356923338389806,
"loss": 0.3808,
"step": 525
},
{
"epoch": 7.93,
"learning_rate": 0.00010237976975461075,
"loss": 0.421,
"step": 526
},
{
"epoch": 7.95,
"learning_rate": 0.00010118996913168144,
"loss": 0.4451,
"step": 527
},
{
"epoch": 7.96,
"learning_rate": 0.0001,
"loss": 0.433,
"step": 528
},
{
"epoch": 7.98,
"learning_rate": 9.881003086831859e-05,
"loss": 0.4279,
"step": 529
},
{
"epoch": 7.99,
"learning_rate": 9.762023024538926e-05,
"loss": 0.4398,
"step": 530
},
{
"epoch": 8.01,
"learning_rate": 9.643076661610196e-05,
"loss": 0.3526,
"step": 531
},
{
"epoch": 8.02,
"learning_rate": 9.524180841762577e-05,
"loss": 0.3066,
"step": 532
},
{
"epoch": 8.04,
"learning_rate": 9.405352401555691e-05,
"loss": 0.2691,
"step": 533
},
{
"epoch": 8.05,
"learning_rate": 9.286608168007678e-05,
"loss": 0.2725,
"step": 534
},
{
"epoch": 8.07,
"learning_rate": 9.167964956212378e-05,
"loss": 0.2821,
"step": 535
},
{
"epoch": 8.08,
"learning_rate": 9.049439566958175e-05,
"loss": 0.277,
"step": 536
},
{
"epoch": 8.1,
"learning_rate": 8.931048784348875e-05,
"loss": 0.2472,
"step": 537
},
{
"epoch": 8.11,
"learning_rate": 8.812809373426951e-05,
"loss": 0.2742,
"step": 538
},
{
"epoch": 8.13,
"learning_rate": 8.694738077799488e-05,
"loss": 0.3079,
"step": 539
},
{
"epoch": 8.14,
"learning_rate": 8.57685161726715e-05,
"loss": 0.2882,
"step": 540
},
{
"epoch": 8.16,
"learning_rate": 8.459166685456547e-05,
"loss": 0.3107,
"step": 541
},
{
"epoch": 8.17,
"learning_rate": 8.34169994745626e-05,
"loss": 0.2603,
"step": 542
},
{
"epoch": 8.19,
"learning_rate": 8.224468037456969e-05,
"loss": 0.2487,
"step": 543
},
{
"epoch": 8.21,
"learning_rate": 8.107487556395901e-05,
"loss": 0.3226,
"step": 544
},
{
"epoch": 8.22,
"learning_rate": 7.990775069606012e-05,
"loss": 0.2915,
"step": 545
},
{
"epoch": 8.24,
"learning_rate": 7.874347104470234e-05,
"loss": 0.2517,
"step": 546
},
{
"epoch": 8.25,
"learning_rate": 7.758220148081028e-05,
"loss": 0.305,
"step": 547
},
{
"epoch": 8.27,
"learning_rate": 7.642410644905726e-05,
"loss": 0.3541,
"step": 548
},
{
"epoch": 8.28,
"learning_rate": 7.526934994457844e-05,
"loss": 0.2514,
"step": 549
},
{
"epoch": 8.3,
"learning_rate": 7.411809548974792e-05,
"loss": 0.3586,
"step": 550
},
{
"epoch": 8.31,
"learning_rate": 7.297050611102272e-05,
"loss": 0.2921,
"step": 551
},
{
"epoch": 8.33,
"learning_rate": 7.182674431585704e-05,
"loss": 0.2393,
"step": 552
},
{
"epoch": 8.34,
"learning_rate": 7.068697206968979e-05,
"loss": 0.2893,
"step": 553
},
{
"epoch": 8.36,
"learning_rate": 6.955135077300931e-05,
"loss": 0.3069,
"step": 554
},
{
"epoch": 8.37,
"learning_rate": 6.842004123849752e-05,
"loss": 0.3323,
"step": 555
},
{
"epoch": 8.39,
"learning_rate": 6.729320366825784e-05,
"loss": 0.283,
"step": 556
},
{
"epoch": 8.4,
"learning_rate": 6.617099763112929e-05,
"loss": 0.2652,
"step": 557
},
{
"epoch": 8.42,
"learning_rate": 6.505358204009017e-05,
"loss": 0.2642,
"step": 558
},
{
"epoch": 8.43,
"learning_rate": 6.394111512975504e-05,
"loss": 0.2811,
"step": 559
},
{
"epoch": 8.45,
"learning_rate": 6.283375443396726e-05,
"loss": 0.329,
"step": 560
},
{
"epoch": 8.46,
"learning_rate": 6.173165676349103e-05,
"loss": 0.28,
"step": 561
},
{
"epoch": 8.48,
"learning_rate": 6.063497818380587e-05,
"loss": 0.28,
"step": 562
},
{
"epoch": 8.49,
"learning_rate": 5.9543873993006496e-05,
"loss": 0.3122,
"step": 563
},
{
"epoch": 8.51,
"learning_rate": 5.845849869981137e-05,
"loss": 0.2803,
"step": 564
},
{
"epoch": 8.52,
"learning_rate": 5.73790060016832e-05,
"loss": 0.3026,
"step": 565
},
{
"epoch": 8.54,
"learning_rate": 5.630554876306407e-05,
"loss": 0.2672,
"step": 566
},
{
"epoch": 8.55,
"learning_rate": 5.5238278993728756e-05,
"loss": 0.2843,
"step": 567
},
{
"epoch": 8.57,
"learning_rate": 5.417734782725896e-05,
"loss": 0.2332,
"step": 568
},
{
"epoch": 8.58,
"learning_rate": 5.3122905499641615e-05,
"loss": 0.2727,
"step": 569
},
{
"epoch": 8.6,
"learning_rate": 5.207510132799436e-05,
"loss": 0.3152,
"step": 570
},
{
"epoch": 8.61,
"learning_rate": 5.1034083689420905e-05,
"loss": 0.2891,
"step": 571
},
{
"epoch": 8.63,
"learning_rate": 5.000000000000002e-05,
"loss": 0.3056,
"step": 572
},
{
"epoch": 8.64,
"learning_rate": 4.8972996693910054e-05,
"loss": 0.2962,
"step": 573
},
{
"epoch": 8.66,
"learning_rate": 4.795321920269279e-05,
"loss": 0.3021,
"step": 574
},
{
"epoch": 8.67,
"learning_rate": 4.694081193465921e-05,
"loss": 0.2582,
"step": 575
},
{
"epoch": 8.69,
"learning_rate": 4.593591825444028e-05,
"loss": 0.2658,
"step": 576
},
{
"epoch": 8.7,
"learning_rate": 4.493868046268514e-05,
"loss": 0.3138,
"step": 577
},
{
"epoch": 8.72,
"learning_rate": 4.394923977591059e-05,
"loss": 0.2751,
"step": 578
},
{
"epoch": 8.73,
"learning_rate": 4.296773630650358e-05,
"loss": 0.2633,
"step": 579
},
{
"epoch": 8.75,
"learning_rate": 4.19943090428802e-05,
"loss": 0.3056,
"step": 580
},
{
"epoch": 8.76,
"learning_rate": 4.10290958298038e-05,
"loss": 0.2989,
"step": 581
},
{
"epoch": 8.78,
"learning_rate": 4.007223334886531e-05,
"loss": 0.258,
"step": 582
},
{
"epoch": 8.79,
"learning_rate": 3.9123857099127936e-05,
"loss": 0.3036,
"step": 583
},
{
"epoch": 8.81,
"learning_rate": 3.8184101377939476e-05,
"loss": 0.3249,
"step": 584
},
{
"epoch": 8.82,
"learning_rate": 3.725309926191479e-05,
"loss": 0.2593,
"step": 585
},
{
"epoch": 8.84,
"learning_rate": 3.6330982588091186e-05,
"loss": 0.2858,
"step": 586
},
{
"epoch": 8.85,
"learning_rate": 3.541788193525913e-05,
"loss": 0.2881,
"step": 587
},
{
"epoch": 8.87,
"learning_rate": 3.45139266054715e-05,
"loss": 0.244,
"step": 588
},
{
"epoch": 8.88,
"learning_rate": 3.361924460573325e-05,
"loss": 0.2691,
"step": 589
},
{
"epoch": 8.9,
"learning_rate": 3.273396262987475e-05,
"loss": 0.2898,
"step": 590
},
{
"epoch": 8.91,
"learning_rate": 3.185820604061088e-05,
"loss": 0.3022,
"step": 591
},
{
"epoch": 8.93,
"learning_rate": 3.099209885178882e-05,
"loss": 0.2656,
"step": 592
},
{
"epoch": 8.94,
"learning_rate": 3.013576371082655e-05,
"loss": 0.2368,
"step": 593
},
{
"epoch": 8.96,
"learning_rate": 2.9289321881345254e-05,
"loss": 0.328,
"step": 594
},
{
"epoch": 8.97,
"learning_rate": 2.8452893225997346e-05,
"loss": 0.3093,
"step": 595
},
{
"epoch": 8.99,
"learning_rate": 2.7626596189492983e-05,
"loss": 0.2982,
"step": 596
},
{
"epoch": 9.0,
"learning_rate": 2.681054778182748e-05,
"loss": 0.2176,
"step": 597
},
{
"epoch": 9.02,
"learning_rate": 2.6004863561711635e-05,
"loss": 0.2075,
"step": 598
},
{
"epoch": 9.03,
"learning_rate": 2.5209657620207915e-05,
"loss": 0.2337,
"step": 599
},
{
"epoch": 9.05,
"learning_rate": 2.4425042564574184e-05,
"loss": 0.2136,
"step": 600
},
{
"epoch": 9.06,
"learning_rate": 2.3651129502317647e-05,
"loss": 0.1918,
"step": 601
},
{
"epoch": 9.08,
"learning_rate": 2.288802802546124e-05,
"loss": 0.1969,
"step": 602
},
{
"epoch": 9.1,
"learning_rate": 2.2135846195024513e-05,
"loss": 0.1933,
"step": 603
},
{
"epoch": 9.11,
"learning_rate": 2.139469052572127e-05,
"loss": 0.2262,
"step": 604
},
{
"epoch": 9.13,
"learning_rate": 2.0664665970876496e-05,
"loss": 0.192,
"step": 605
},
{
"epoch": 9.14,
"learning_rate": 1.994587590756397e-05,
"loss": 0.1849,
"step": 606
},
{
"epoch": 9.16,
"learning_rate": 1.923842212196735e-05,
"loss": 0.213,
"step": 607
},
{
"epoch": 9.17,
"learning_rate": 1.854240479496643e-05,
"loss": 0.1907,
"step": 608
},
{
"epoch": 9.19,
"learning_rate": 1.7857922487950874e-05,
"loss": 0.2197,
"step": 609
},
{
"epoch": 9.2,
"learning_rate": 1.7185072128862933e-05,
"loss": 0.226,
"step": 610
},
{
"epoch": 9.22,
"learning_rate": 1.6523948998471973e-05,
"loss": 0.1816,
"step": 611
},
{
"epoch": 9.23,
"learning_rate": 1.587464671688187e-05,
"loss": 0.2111,
"step": 612
},
{
"epoch": 9.25,
"learning_rate": 1.523725723027365e-05,
"loss": 0.19,
"step": 613
},
{
"epoch": 9.26,
"learning_rate": 1.4611870797885197e-05,
"loss": 0.2407,
"step": 614
},
{
"epoch": 9.28,
"learning_rate": 1.3998575979229944e-05,
"loss": 0.1747,
"step": 615
},
{
"epoch": 9.29,
"learning_rate": 1.339745962155613e-05,
"loss": 0.1693,
"step": 616
},
{
"epoch": 9.31,
"learning_rate": 1.280860684754852e-05,
"loss": 0.2539,
"step": 617
},
{
"epoch": 9.32,
"learning_rate": 1.2232101043274436e-05,
"loss": 0.1913,
"step": 618
},
{
"epoch": 9.34,
"learning_rate": 1.166802384637561e-05,
"loss": 0.2216,
"step": 619
},
{
"epoch": 9.35,
"learning_rate": 1.1116455134507664e-05,
"loss": 0.2111,
"step": 620
},
{
"epoch": 9.37,
"learning_rate": 1.057747301402887e-05,
"loss": 0.2153,
"step": 621
},
{
"epoch": 9.38,
"learning_rate": 1.0051153808939685e-05,
"loss": 0.1951,
"step": 622
},
{
"epoch": 9.4,
"learning_rate": 9.537572050074618e-06,
"loss": 0.1915,
"step": 623
},
{
"epoch": 9.41,
"learning_rate": 9.036800464548157e-06,
"loss": 0.2099,
"step": 624
},
{
"epoch": 9.43,
"learning_rate": 8.548909965456065e-06,
"loss": 0.1805,
"step": 625
},
{
"epoch": 9.44,
"learning_rate": 8.073969641833445e-06,
"loss": 0.2221,
"step": 626
},
{
"epoch": 9.46,
"learning_rate": 7.612046748871327e-06,
"loss": 0.2165,
"step": 627
},
{
"epoch": 9.47,
"learning_rate": 7.163206698392744e-06,
"loss": 0.199,
"step": 628
},
{
"epoch": 9.49,
"learning_rate": 6.7275130495899175e-06,
"loss": 0.1879,
"step": 629
},
{
"epoch": 9.5,
"learning_rate": 6.3050275000238414e-06,
"loss": 0.2181,
"step": 630
},
{
"epoch": 9.52,
"learning_rate": 5.895809876887326e-06,
"loss": 0.1951,
"step": 631
},
{
"epoch": 9.53,
"learning_rate": 5.499918128533155e-06,
"loss": 0.1942,
"step": 632
},
{
"epoch": 9.55,
"learning_rate": 5.1174083162680465e-06,
"loss": 0.2205,
"step": 633
},
{
"epoch": 9.56,
"learning_rate": 4.748334606413951e-06,
"loss": 0.2113,
"step": 634
},
{
"epoch": 9.58,
"learning_rate": 4.392749262637752e-06,
"loss": 0.2094,
"step": 635
},
{
"epoch": 9.59,
"learning_rate": 4.050702638550275e-06,
"loss": 0.1757,
"step": 636
},
{
"epoch": 9.61,
"learning_rate": 3.722243170575801e-06,
"loss": 0.2251,
"step": 637
},
{
"epoch": 9.62,
"learning_rate": 3.40741737109318e-06,
"loss": 0.1949,
"step": 638
},
{
"epoch": 9.64,
"learning_rate": 3.1062698218492724e-06,
"loss": 0.2234,
"step": 639
},
{
"epoch": 9.65,
"learning_rate": 2.818843167645835e-06,
"loss": 0.2287,
"step": 640
},
{
"epoch": 9.67,
"learning_rate": 2.5451781103006944e-06,
"loss": 0.1959,
"step": 641
},
{
"epoch": 9.68,
"learning_rate": 2.2853134028840594e-06,
"loss": 0.1696,
"step": 642
},
{
"epoch": 9.7,
"learning_rate": 2.039285844230765e-06,
"loss": 0.1919,
"step": 643
},
{
"epoch": 9.71,
"learning_rate": 1.8071302737293295e-06,
"loss": 0.1944,
"step": 644
},
{
"epoch": 9.73,
"learning_rate": 1.5888795663883904e-06,
"loss": 0.1871,
"step": 645
},
{
"epoch": 9.74,
"learning_rate": 1.3845646281813507e-06,
"loss": 0.2535,
"step": 646
},
{
"epoch": 9.76,
"learning_rate": 1.1942143916698457e-06,
"loss": 0.1789,
"step": 647
},
{
"epoch": 9.77,
"learning_rate": 1.0178558119067315e-06,
"loss": 0.1722,
"step": 648
},
{
"epoch": 9.79,
"learning_rate": 8.555138626189618e-07,
"loss": 0.2148,
"step": 649
},
{
"epoch": 9.8,
"learning_rate": 7.072115326711704e-07,
"loss": 0.1889,
"step": 650
},
{
"epoch": 9.82,
"learning_rate": 5.729698228102653e-07,
"loss": 0.1746,
"step": 651
},
{
"epoch": 9.83,
"learning_rate": 4.5280774269154115e-07,
"loss": 0.1984,
"step": 652
},
{
"epoch": 9.85,
"learning_rate": 3.467423081867649e-07,
"loss": 0.2096,
"step": 653
},
{
"epoch": 9.86,
"learning_rate": 2.547885389746485e-07,
"loss": 0.2252,
"step": 654
},
{
"epoch": 9.88,
"learning_rate": 1.7695945641386102e-07,
"loss": 0.1878,
"step": 655
},
{
"epoch": 9.89,
"learning_rate": 1.1326608169920372e-07,
"loss": 0.1954,
"step": 656
},
{
"epoch": 9.91,
"learning_rate": 6.37174343008251e-08,
"loss": 0.1886,
"step": 657
},
{
"epoch": 9.92,
"learning_rate": 2.8320530687098166e-08,
"loss": 0.251,
"step": 658
},
{
"epoch": 9.94,
"learning_rate": 7.080383330915208e-09,
"loss": 0.2084,
"step": 659
},
{
"epoch": 9.95,
"learning_rate": 0.0,
"loss": 0.2066,
"step": 660
},
{
"epoch": 3.96,
"learning_rate": 0.00010657942003278107,
"loss": 1.6314,
"step": 661
},
{
"epoch": 3.96,
"learning_rate": 0.00010564058429494808,
"loss": 1.5509,
"step": 662
},
{
"epoch": 3.97,
"learning_rate": 0.00010470124952590977,
"loss": 1.6615,
"step": 663
},
{
"epoch": 3.97,
"learning_rate": 0.00010376149883006406,
"loss": 1.3416,
"step": 664
},
{
"epoch": 3.98,
"learning_rate": 0.0001028214153486066,
"loss": 1.4959,
"step": 665
},
{
"epoch": 3.99,
"learning_rate": 0.00010188108225217516,
"loss": 1.2929,
"step": 666
},
{
"epoch": 3.99,
"learning_rate": 0.00010094058273349125,
"loss": 1.2884,
"step": 667
},
{
"epoch": 4.0,
"learning_rate": 0.0001,
"loss": 1.272,
"step": 668
},
{
"epoch": 4.0,
"learning_rate": 9.90594172665088e-05,
"loss": 1.2148,
"step": 669
},
{
"epoch": 4.01,
"learning_rate": 9.811891774782484e-05,
"loss": 1.2181,
"step": 670
},
{
"epoch": 4.02,
"learning_rate": 9.717858465139342e-05,
"loss": 1.2137,
"step": 671
},
{
"epoch": 4.02,
"learning_rate": 9.623850116993596e-05,
"loss": 1.1983,
"step": 672
},
{
"epoch": 4.03,
"learning_rate": 9.529875047409027e-05,
"loss": 1.1848,
"step": 673
},
{
"epoch": 4.03,
"learning_rate": 9.43594157050519e-05,
"loss": 1.0902,
"step": 674
},
{
"epoch": 4.04,
"learning_rate": 9.342057996721894e-05,
"loss": 1.1571,
"step": 675
},
{
"epoch": 4.05,
"learning_rate": 9.248232632083923e-05,
"loss": 0.9283,
"step": 676
},
{
"epoch": 4.05,
"learning_rate": 9.15447377746621e-05,
"loss": 1.1645,
"step": 677
},
{
"epoch": 4.06,
"learning_rate": 9.060789727859457e-05,
"loss": 1.0975,
"step": 678
},
{
"epoch": 4.06,
"learning_rate": 8.967188771636236e-05,
"loss": 0.9576,
"step": 679
},
{
"epoch": 4.07,
"learning_rate": 8.873679189817732e-05,
"loss": 1.1544,
"step": 680
},
{
"epoch": 4.08,
"learning_rate": 8.78026925534108e-05,
"loss": 1.0777,
"step": 681
},
{
"epoch": 4.08,
"learning_rate": 8.686967232327478e-05,
"loss": 1.0782,
"step": 682
},
{
"epoch": 4.09,
"learning_rate": 8.59378137535102e-05,
"loss": 1.1202,
"step": 683
},
{
"epoch": 4.09,
"learning_rate": 8.500719928708405e-05,
"loss": 1.1844,
"step": 684
},
{
"epoch": 4.1,
"learning_rate": 8.407791125689578e-05,
"loss": 1.0473,
"step": 685
},
{
"epoch": 4.11,
"learning_rate": 8.315003187849277e-05,
"loss": 1.0613,
"step": 686
},
{
"epoch": 4.11,
"learning_rate": 8.222364324279689e-05,
"loss": 1.1134,
"step": 687
},
{
"epoch": 4.12,
"learning_rate": 8.129882730884168e-05,
"loss": 1.0871,
"step": 688
},
{
"epoch": 4.12,
"learning_rate": 8.037566589652141e-05,
"loss": 1.1047,
"step": 689
},
{
"epoch": 4.13,
"learning_rate": 7.945424067935217e-05,
"loss": 1.024,
"step": 690
},
{
"epoch": 4.14,
"learning_rate": 7.853463317724614e-05,
"loss": 1.2049,
"step": 691
},
{
"epoch": 4.14,
"learning_rate": 7.761692474929961e-05,
"loss": 1.0818,
"step": 692
},
{
"epoch": 4.15,
"learning_rate": 7.67011965865947e-05,
"loss": 1.2091,
"step": 693
},
{
"epoch": 4.15,
"learning_rate": 7.578752970501642e-05,
"loss": 1.1569,
"step": 694
},
{
"epoch": 4.16,
"learning_rate": 7.487600493808513e-05,
"loss": 1.0476,
"step": 695
},
{
"epoch": 4.17,
"learning_rate": 7.396670292980492e-05,
"loss": 0.8989,
"step": 696
},
{
"epoch": 4.17,
"learning_rate": 7.305970412752909e-05,
"loss": 1.0927,
"step": 697
},
{
"epoch": 4.18,
"learning_rate": 7.215508877484258e-05,
"loss": 1.1766,
"step": 698
},
{
"epoch": 4.18,
"learning_rate": 7.125293690446306e-05,
"loss": 1.2596,
"step": 699
},
{
"epoch": 4.19,
"learning_rate": 7.035332833115997e-05,
"loss": 1.1233,
"step": 700
},
{
"epoch": 4.2,
"learning_rate": 6.945634264469339e-05,
"loss": 0.997,
"step": 701
},
{
"epoch": 4.2,
"learning_rate": 6.85620592027725e-05,
"loss": 1.0831,
"step": 702
},
{
"epoch": 4.21,
"learning_rate": 6.76705571240348e-05,
"loss": 1.0424,
"step": 703
},
{
"epoch": 4.21,
"learning_rate": 6.678191528104629e-05,
"loss": 1.2312,
"step": 704
},
{
"epoch": 4.22,
"learning_rate": 6.58962122933234e-05,
"loss": 1.0623,
"step": 705
},
{
"epoch": 4.23,
"learning_rate": 6.501352652037764e-05,
"loss": 1.0416,
"step": 706
},
{
"epoch": 4.23,
"learning_rate": 6.413393605478275e-05,
"loss": 0.9957,
"step": 707
},
{
"epoch": 4.24,
"learning_rate": 6.325751871526588e-05,
"loss": 0.9729,
"step": 708
},
{
"epoch": 4.24,
"learning_rate": 6.238435203982278e-05,
"loss": 1.124,
"step": 709
},
{
"epoch": 4.25,
"learning_rate": 6.151451327885805e-05,
"loss": 1.0767,
"step": 710
},
{
"epoch": 4.26,
"learning_rate": 6.0648079388350466e-05,
"loss": 1.0175,
"step": 711
},
{
"epoch": 4.26,
"learning_rate": 5.978512702304461e-05,
"loss": 0.9669,
"step": 712
},
{
"epoch": 4.27,
"learning_rate": 5.892573252966926e-05,
"loss": 1.1609,
"step": 713
},
{
"epoch": 4.27,
"learning_rate": 5.806997194018271e-05,
"loss": 1.0129,
"step": 714
},
{
"epoch": 4.28,
"learning_rate": 5.721792096504611e-05,
"loss": 1.0527,
"step": 715
},
{
"epoch": 4.29,
"learning_rate": 5.636965498652544e-05,
"loss": 1.0049,
"step": 716
},
{
"epoch": 4.29,
"learning_rate": 5.5525249052022076e-05,
"loss": 1.0176,
"step": 717
},
{
"epoch": 4.3,
"learning_rate": 5.4684777867433335e-05,
"loss": 0.9946,
"step": 718
},
{
"epoch": 4.3,
"learning_rate": 5.3848315790543126e-05,
"loss": 0.9892,
"step": 719
},
{
"epoch": 4.31,
"learning_rate": 5.301593682444352e-05,
"loss": 1.0541,
"step": 720
},
{
"epoch": 4.32,
"learning_rate": 5.218771461098733e-05,
"loss": 1.0958,
"step": 721
},
{
"epoch": 4.32,
"learning_rate": 5.1363722424273156e-05,
"loss": 0.9798,
"step": 722
},
{
"epoch": 4.33,
"learning_rate": 5.054403316416247e-05,
"loss": 1.0182,
"step": 723
},
{
"epoch": 4.33,
"learning_rate": 4.972871934983032e-05,
"loss": 1.2027,
"step": 724
},
{
"epoch": 4.34,
"learning_rate": 4.891785311334923e-05,
"loss": 0.9431,
"step": 725
},
{
"epoch": 4.35,
"learning_rate": 4.811150619330759e-05,
"loss": 1.0192,
"step": 726
},
{
"epoch": 4.35,
"learning_rate": 4.7309749928463035e-05,
"loss": 1.0675,
"step": 727
},
{
"epoch": 4.36,
"learning_rate": 4.6512655251430695e-05,
"loss": 1.0731,
"step": 728
},
{
"epoch": 4.36,
"learning_rate": 4.5720292682407874e-05,
"loss": 0.9864,
"step": 729
},
{
"epoch": 4.37,
"learning_rate": 4.493273232293506e-05,
"loss": 1.1571,
"step": 730
},
{
"epoch": 4.38,
"learning_rate": 4.41500438496937e-05,
"loss": 0.9996,
"step": 731
},
{
"epoch": 4.38,
"learning_rate": 4.3372296508342026e-05,
"loss": 1.0229,
"step": 732
},
{
"epoch": 4.39,
"learning_rate": 4.2599559107388645e-05,
"loss": 0.9749,
"step": 733
},
{
"epoch": 4.39,
"learning_rate": 4.1831900012105105e-05,
"loss": 0.9654,
"step": 734
},
{
"epoch": 4.4,
"learning_rate": 4.10693871384773e-05,
"loss": 1.157,
"step": 735
},
{
"epoch": 4.41,
"learning_rate": 4.031208794719695e-05,
"loss": 1.1395,
"step": 736
},
{
"epoch": 4.41,
"learning_rate": 3.95600694376933e-05,
"loss": 0.9787,
"step": 737
},
{
"epoch": 4.42,
"learning_rate": 3.881339814220555e-05,
"loss": 1.1573,
"step": 738
},
{
"epoch": 4.42,
"learning_rate": 3.8072140119896504e-05,
"loss": 1.0817,
"step": 739
},
{
"epoch": 4.43,
"learning_rate": 3.733636095100838e-05,
"loss": 1.0108,
"step": 740
},
{
"epoch": 4.44,
"learning_rate": 3.660612573106081e-05,
"loss": 1.0674,
"step": 741
},
{
"epoch": 4.44,
"learning_rate": 3.588149906509166e-05,
"loss": 1.0425,
"step": 742
},
{
"epoch": 4.45,
"learning_rate": 3.5162545061941335e-05,
"loss": 0.9176,
"step": 743
},
{
"epoch": 4.45,
"learning_rate": 3.444932732858107e-05,
"loss": 0.949,
"step": 744
},
{
"epoch": 4.46,
"learning_rate": 3.3741908964485414e-05,
"loss": 1.0735,
"step": 745
},
{
"epoch": 4.47,
"learning_rate": 3.304035255604974e-05,
"loss": 1.0489,
"step": 746
},
{
"epoch": 4.47,
"learning_rate": 3.234472017105313e-05,
"loss": 1.014,
"step": 747
},
{
"epoch": 4.48,
"learning_rate": 3.1655073353167285e-05,
"loss": 1.0575,
"step": 748
},
{
"epoch": 4.48,
"learning_rate": 3.0971473116511394e-05,
"loss": 1.0651,
"step": 749
},
{
"epoch": 4.49,
"learning_rate": 3.0293979940254314e-05,
"loss": 1.038,
"step": 750
},
{
"epoch": 4.5,
"learning_rate": 2.9622653763263874e-05,
"loss": 1.0974,
"step": 751
},
{
"epoch": 4.5,
"learning_rate": 2.8957553978803898e-05,
"loss": 1.087,
"step": 752
},
{
"epoch": 4.51,
"learning_rate": 2.8298739429279707e-05,
"loss": 1.1009,
"step": 753
},
{
"epoch": 4.51,
"learning_rate": 2.764626840103208e-05,
"loss": 1.0945,
"step": 754
},
{
"epoch": 4.52,
"learning_rate": 2.7000198619180794e-05,
"loss": 1.0436,
"step": 755
},
{
"epoch": 4.53,
"learning_rate": 2.636058724251739e-05,
"loss": 1.1178,
"step": 756
},
{
"epoch": 4.53,
"learning_rate": 2.5727490858448288e-05,
"loss": 1.0239,
"step": 757
},
{
"epoch": 4.54,
"learning_rate": 2.510096547798857e-05,
"loss": 1.0082,
"step": 758
},
{
"epoch": 4.54,
"learning_rate": 2.4481066530806395e-05,
"loss": 0.9967,
"step": 759
},
{
"epoch": 4.55,
"learning_rate": 2.3867848860319144e-05,
"loss": 1.0141,
"step": 760
},
{
"epoch": 4.56,
"learning_rate": 2.3261366718841305e-05,
"loss": 1.0257,
"step": 761
},
{
"epoch": 4.56,
"learning_rate": 2.2661673762784807e-05,
"loss": 1.0973,
"step": 762
},
{
"epoch": 4.57,
"learning_rate": 2.206882304791176e-05,
"loss": 0.9521,
"step": 763
},
{
"epoch": 4.57,
"learning_rate": 2.148286702464063e-05,
"loss": 1.0711,
"step": 764
},
{
"epoch": 4.58,
"learning_rate": 2.0903857533405958e-05,
"loss": 0.9501,
"step": 765
},
{
"epoch": 4.58,
"learning_rate": 2.033184580007177e-05,
"loss": 1.016,
"step": 766
},
{
"epoch": 4.59,
"learning_rate": 1.9766882431399646e-05,
"loss": 1.0498,
"step": 767
},
{
"epoch": 4.6,
"learning_rate": 1.920901741057153e-05,
"loss": 1.1671,
"step": 768
},
{
"epoch": 4.6,
"learning_rate": 1.8658300092767544e-05,
"loss": 1.0246,
"step": 769
},
{
"epoch": 4.61,
"learning_rate": 1.8114779200799437e-05,
"loss": 0.9781,
"step": 770
},
{
"epoch": 4.61,
"learning_rate": 1.7578502820800045e-05,
"loss": 1.0746,
"step": 771
},
{
"epoch": 4.62,
"learning_rate": 1.7049518397969132e-05,
"loss": 1.0822,
"step": 772
},
{
"epoch": 4.63,
"learning_rate": 1.652787273237565e-05,
"loss": 1.0571,
"step": 773
},
{
"epoch": 4.63,
"learning_rate": 1.601361197481741e-05,
"loss": 0.9481,
"step": 774
},
{
"epoch": 4.64,
"learning_rate": 1.5506781622737942e-05,
"loss": 1.0077,
"step": 775
},
{
"epoch": 4.64,
"learning_rate": 1.500742651620145e-05,
"loss": 1.0027,
"step": 776
},
{
"epoch": 4.65,
"learning_rate": 1.4515590833925507e-05,
"loss": 0.9245,
"step": 777
},
{
"epoch": 4.66,
"learning_rate": 1.4031318089372615e-05,
"loss": 1.2058,
"step": 778
},
{
"epoch": 4.66,
"learning_rate": 1.3554651126900564e-05,
"loss": 1.1016,
"step": 779
},
{
"epoch": 4.67,
"learning_rate": 1.308563211797179e-05,
"loss": 1.1456,
"step": 780
},
{
"epoch": 4.67,
"learning_rate": 1.2624302557422473e-05,
"loss": 1.0946,
"step": 781
},
{
"epoch": 4.68,
"learning_rate": 1.2170703259791471e-05,
"loss": 1.2343,
"step": 782
},
{
"epoch": 4.69,
"learning_rate": 1.172487435570926e-05,
"loss": 1.006,
"step": 783
},
{
"epoch": 4.69,
"learning_rate": 1.1286855288347675e-05,
"loss": 1.1386,
"step": 784
},
{
"epoch": 4.7,
"learning_rate": 1.0856684809930151e-05,
"loss": 0.9418,
"step": 785
},
{
"epoch": 4.7,
"learning_rate": 1.0434400978303426e-05,
"loss": 1.1213,
"step": 786
},
{
"epoch": 4.71,
"learning_rate": 1.0020041153570347e-05,
"loss": 0.9317,
"step": 787
},
{
"epoch": 4.72,
"learning_rate": 9.613641994784651e-06,
"loss": 1.0073,
"step": 788
},
{
"epoch": 4.72,
"learning_rate": 9.215239456707635e-06,
"loss": 1.0513,
"step": 789
},
{
"epoch": 4.73,
"learning_rate": 8.824868786627304e-06,
"loss": 1.1849,
"step": 790
},
{
"epoch": 4.73,
"learning_rate": 8.442564521239782e-06,
"loss": 1.0606,
"step": 791
},
{
"epoch": 4.74,
"learning_rate": 8.06836048359394e-06,
"loss": 0.9558,
"step": 792
},
{
"epoch": 4.75,
"learning_rate": 7.70228978009907e-06,
"loss": 1.0529,
"step": 793
},
{
"epoch": 4.75,
"learning_rate": 7.344384797595716e-06,
"loss": 0.9983,
"step": 794
},
{
"epoch": 4.76,
"learning_rate": 6.994677200490507e-06,
"loss": 0.9064,
"step": 795
},
{
"epoch": 4.76,
"learning_rate": 6.653197927954791e-06,
"loss": 1.0392,
"step": 796
},
{
"epoch": 4.77,
"learning_rate": 6.319977191187232e-06,
"loss": 1.0478,
"step": 797
},
{
"epoch": 4.78,
"learning_rate": 5.995044470741151e-06,
"loss": 1.0528,
"step": 798
},
{
"epoch": 4.78,
"learning_rate": 5.678428513916212e-06,
"loss": 1.0008,
"step": 799
},
{
"epoch": 4.79,
"learning_rate": 5.370157332215209e-06,
"loss": 1.0729,
"step": 800
},
{
"epoch": 4.79,
"learning_rate": 5.07025819886574e-06,
"loss": 1.0285,
"step": 801
},
{
"epoch": 4.8,
"learning_rate": 4.778757646407362e-06,
"loss": 1.0473,
"step": 802
},
{
"epoch": 4.81,
"learning_rate": 4.495681464344259e-06,
"loss": 1.0021,
"step": 803
},
{
"epoch": 4.81,
"learning_rate": 4.22105469686348e-06,
"loss": 1.0449,
"step": 804
},
{
"epoch": 4.82,
"learning_rate": 3.954901640619368e-06,
"loss": 0.9546,
"step": 805
},
{
"epoch": 4.82,
"learning_rate": 3.6972458425838962e-06,
"loss": 1.0409,
"step": 806
},
{
"epoch": 4.83,
"learning_rate": 3.4481100979635306e-06,
"loss": 1.0518,
"step": 807
},
{
"epoch": 4.84,
"learning_rate": 3.207516448182435e-06,
"loss": 0.9276,
"step": 808
},
{
"epoch": 4.84,
"learning_rate": 2.9754861789324073e-06,
"loss": 1.0274,
"step": 809
},
{
"epoch": 4.85,
"learning_rate": 2.752039818289809e-06,
"loss": 1.1427,
"step": 810
}
],
"logging_steps": 1,
"max_steps": 835,
"num_train_epochs": 5,
"save_steps": 15,
"total_flos": 1.213933281851474e+17,
"trial_name": null,
"trial_params": null
}