gpt2-wikitext-103 / trainer_state.json
Commit: Add fine-tuned model (e76c1eb)
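The rest of this file is the Hugging Face Trainer state: a few run-level fields (epoch, global_step, process flags) followed by log_history, a list of per-step records of epoch, learning rate, and training loss. As a minimal sketch, assuming the JSON below is saved locally as trainer_state.json, the log can be read with the standard library:

import json

# Hypothetical local path; adjust to wherever the checkpoint was saved.
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry with a "loss" key is one logged optimizer step.
steps = [e["step"] for e in state["log_history"] if "loss" in e]
losses = [e["loss"] for e in state["log_history"] if "loss" in e]
print(f"logged steps: {len(steps)}, final logged loss: {losses[-1]:.4f}")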
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 10.0,
"global_step": 1110,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 9.00900900900901e-08,
"loss": 3.6855,
"step": 1
},
{
"epoch": 0.02,
"learning_rate": 1.801801801801802e-07,
"loss": 3.8965,
"step": 2
},
{
"epoch": 0.03,
"learning_rate": 2.702702702702703e-07,
"loss": 3.8789,
"step": 3
},
{
"epoch": 0.04,
"learning_rate": 3.603603603603604e-07,
"loss": 3.875,
"step": 4
},
{
"epoch": 0.05,
"learning_rate": 4.504504504504505e-07,
"loss": 3.9004,
"step": 5
},
{
"epoch": 0.05,
"learning_rate": 5.405405405405406e-07,
"loss": 3.7148,
"step": 6
},
{
"epoch": 0.06,
"learning_rate": 6.306306306306306e-07,
"loss": 3.8652,
"step": 7
},
{
"epoch": 0.07,
"learning_rate": 7.207207207207208e-07,
"loss": 3.7227,
"step": 8
},
{
"epoch": 0.08,
"learning_rate": 8.108108108108109e-07,
"loss": 3.7637,
"step": 9
},
{
"epoch": 0.09,
"learning_rate": 9.00900900900901e-07,
"loss": 3.5371,
"step": 10
},
{
"epoch": 0.1,
"learning_rate": 9.909909909909911e-07,
"loss": 3.791,
"step": 11
},
{
"epoch": 0.11,
"learning_rate": 1.0810810810810812e-06,
"loss": 3.6094,
"step": 12
},
{
"epoch": 0.12,
"learning_rate": 1.1711711711711712e-06,
"loss": 3.666,
"step": 13
},
{
"epoch": 0.13,
"learning_rate": 1.2612612612612613e-06,
"loss": 3.6855,
"step": 14
},
{
"epoch": 0.14,
"learning_rate": 1.3513513513513515e-06,
"loss": 3.7734,
"step": 15
},
{
"epoch": 0.14,
"learning_rate": 1.4414414414414416e-06,
"loss": 3.6406,
"step": 16
},
{
"epoch": 0.15,
"learning_rate": 1.5315315315315316e-06,
"loss": 3.5977,
"step": 17
},
{
"epoch": 0.16,
"learning_rate": 1.6216216216216219e-06,
"loss": 3.7031,
"step": 18
},
{
"epoch": 0.17,
"learning_rate": 1.711711711711712e-06,
"loss": 3.4102,
"step": 19
},
{
"epoch": 0.18,
"learning_rate": 1.801801801801802e-06,
"loss": 3.6465,
"step": 20
},
{
"epoch": 0.19,
"learning_rate": 1.8918918918918922e-06,
"loss": 3.5234,
"step": 21
},
{
"epoch": 0.2,
"learning_rate": 1.9819819819819822e-06,
"loss": 3.5664,
"step": 22
},
{
"epoch": 0.21,
"learning_rate": 2.0720720720720723e-06,
"loss": 3.6035,
"step": 23
},
{
"epoch": 0.22,
"learning_rate": 2.1621621621621623e-06,
"loss": 3.6211,
"step": 24
},
{
"epoch": 0.23,
"learning_rate": 2.2522522522522524e-06,
"loss": 3.5,
"step": 25
},
{
"epoch": 0.23,
"learning_rate": 2.3423423423423424e-06,
"loss": 3.5605,
"step": 26
},
{
"epoch": 0.24,
"learning_rate": 2.432432432432433e-06,
"loss": 3.5371,
"step": 27
},
{
"epoch": 0.25,
"learning_rate": 2.5225225225225225e-06,
"loss": 3.623,
"step": 28
},
{
"epoch": 0.26,
"learning_rate": 2.612612612612613e-06,
"loss": 3.4277,
"step": 29
},
{
"epoch": 0.27,
"learning_rate": 2.702702702702703e-06,
"loss": 3.6289,
"step": 30
},
{
"epoch": 0.28,
"learning_rate": 2.7927927927927926e-06,
"loss": 3.6973,
"step": 31
},
{
"epoch": 0.29,
"learning_rate": 2.882882882882883e-06,
"loss": 3.2871,
"step": 32
},
{
"epoch": 0.3,
"learning_rate": 2.9729729729729736e-06,
"loss": 3.4746,
"step": 33
},
{
"epoch": 0.31,
"learning_rate": 3.063063063063063e-06,
"loss": 3.541,
"step": 34
},
{
"epoch": 0.32,
"learning_rate": 3.1531531531531532e-06,
"loss": 3.4258,
"step": 35
},
{
"epoch": 0.32,
"learning_rate": 3.2432432432432437e-06,
"loss": 3.5742,
"step": 36
},
{
"epoch": 0.33,
"learning_rate": 3.3333333333333333e-06,
"loss": 3.5762,
"step": 37
},
{
"epoch": 0.34,
"learning_rate": 3.423423423423424e-06,
"loss": 3.627,
"step": 38
},
{
"epoch": 0.35,
"learning_rate": 3.513513513513514e-06,
"loss": 3.6406,
"step": 39
},
{
"epoch": 0.36,
"learning_rate": 3.603603603603604e-06,
"loss": 3.4355,
"step": 40
},
{
"epoch": 0.37,
"learning_rate": 3.693693693693694e-06,
"loss": 3.5664,
"step": 41
},
{
"epoch": 0.38,
"learning_rate": 3.7837837837837844e-06,
"loss": 3.5,
"step": 42
},
{
"epoch": 0.39,
"learning_rate": 3.8738738738738744e-06,
"loss": 3.7227,
"step": 43
},
{
"epoch": 0.4,
"learning_rate": 3.9639639639639645e-06,
"loss": 3.4102,
"step": 44
},
{
"epoch": 0.41,
"learning_rate": 4.0540540540540545e-06,
"loss": 3.4883,
"step": 45
},
{
"epoch": 0.41,
"learning_rate": 4.1441441441441446e-06,
"loss": 3.5723,
"step": 46
},
{
"epoch": 0.42,
"learning_rate": 4.234234234234235e-06,
"loss": 3.3359,
"step": 47
},
{
"epoch": 0.43,
"learning_rate": 4.324324324324325e-06,
"loss": 3.5547,
"step": 48
},
{
"epoch": 0.44,
"learning_rate": 4.414414414414415e-06,
"loss": 3.5508,
"step": 49
},
{
"epoch": 0.45,
"learning_rate": 4.504504504504505e-06,
"loss": 3.3633,
"step": 50
},
{
"epoch": 0.46,
"learning_rate": 4.594594594594596e-06,
"loss": 3.3711,
"step": 51
},
{
"epoch": 0.47,
"learning_rate": 4.684684684684685e-06,
"loss": 3.4102,
"step": 52
},
{
"epoch": 0.48,
"learning_rate": 4.774774774774775e-06,
"loss": 3.2422,
"step": 53
},
{
"epoch": 0.49,
"learning_rate": 4.864864864864866e-06,
"loss": 3.5723,
"step": 54
},
{
"epoch": 0.5,
"learning_rate": 4.954954954954955e-06,
"loss": 3.4727,
"step": 55
},
{
"epoch": 0.5,
"learning_rate": 5.045045045045045e-06,
"loss": 3.5508,
"step": 56
},
{
"epoch": 0.51,
"learning_rate": 5.135135135135135e-06,
"loss": 3.5469,
"step": 57
},
{
"epoch": 0.52,
"learning_rate": 5.225225225225226e-06,
"loss": 3.418,
"step": 58
},
{
"epoch": 0.53,
"learning_rate": 5.315315315315316e-06,
"loss": 3.5469,
"step": 59
},
{
"epoch": 0.54,
"learning_rate": 5.405405405405406e-06,
"loss": 3.6055,
"step": 60
},
{
"epoch": 0.55,
"learning_rate": 5.495495495495496e-06,
"loss": 3.4746,
"step": 61
},
{
"epoch": 0.56,
"learning_rate": 5.585585585585585e-06,
"loss": 3.2266,
"step": 62
},
{
"epoch": 0.57,
"learning_rate": 5.675675675675676e-06,
"loss": 3.3047,
"step": 63
},
{
"epoch": 0.58,
"learning_rate": 5.765765765765766e-06,
"loss": 3.4531,
"step": 64
},
{
"epoch": 0.59,
"learning_rate": 5.855855855855856e-06,
"loss": 3.5977,
"step": 65
},
{
"epoch": 0.59,
"learning_rate": 5.945945945945947e-06,
"loss": 3.6289,
"step": 66
},
{
"epoch": 0.6,
"learning_rate": 6.036036036036037e-06,
"loss": 3.2539,
"step": 67
},
{
"epoch": 0.61,
"learning_rate": 6.126126126126126e-06,
"loss": 3.4141,
"step": 68
},
{
"epoch": 0.62,
"learning_rate": 6.2162162162162164e-06,
"loss": 3.4316,
"step": 69
},
{
"epoch": 0.63,
"learning_rate": 6.3063063063063065e-06,
"loss": 3.2227,
"step": 70
},
{
"epoch": 0.64,
"learning_rate": 6.396396396396397e-06,
"loss": 3.5234,
"step": 71
},
{
"epoch": 0.65,
"learning_rate": 6.486486486486487e-06,
"loss": 3.4922,
"step": 72
},
{
"epoch": 0.66,
"learning_rate": 6.5765765765765775e-06,
"loss": 3.4922,
"step": 73
},
{
"epoch": 0.67,
"learning_rate": 6.666666666666667e-06,
"loss": 3.5742,
"step": 74
},
{
"epoch": 0.68,
"learning_rate": 6.7567567567567575e-06,
"loss": 3.2207,
"step": 75
},
{
"epoch": 0.68,
"learning_rate": 6.846846846846848e-06,
"loss": 3.5137,
"step": 76
},
{
"epoch": 0.69,
"learning_rate": 6.936936936936938e-06,
"loss": 3.248,
"step": 77
},
{
"epoch": 0.7,
"learning_rate": 7.027027027027028e-06,
"loss": 3.3711,
"step": 78
},
{
"epoch": 0.71,
"learning_rate": 7.117117117117117e-06,
"loss": 3.2012,
"step": 79
},
{
"epoch": 0.72,
"learning_rate": 7.207207207207208e-06,
"loss": 3.252,
"step": 80
},
{
"epoch": 0.73,
"learning_rate": 7.297297297297298e-06,
"loss": 3.5039,
"step": 81
},
{
"epoch": 0.74,
"learning_rate": 7.387387387387388e-06,
"loss": 3.2539,
"step": 82
},
{
"epoch": 0.75,
"learning_rate": 7.477477477477479e-06,
"loss": 3.4668,
"step": 83
},
{
"epoch": 0.76,
"learning_rate": 7.567567567567569e-06,
"loss": 2.9492,
"step": 84
},
{
"epoch": 0.77,
"learning_rate": 7.657657657657658e-06,
"loss": 3.3594,
"step": 85
},
{
"epoch": 0.77,
"learning_rate": 7.747747747747749e-06,
"loss": 3.4844,
"step": 86
},
{
"epoch": 0.78,
"learning_rate": 7.837837837837838e-06,
"loss": 3.4375,
"step": 87
},
{
"epoch": 0.79,
"learning_rate": 7.927927927927929e-06,
"loss": 3.2637,
"step": 88
},
{
"epoch": 0.8,
"learning_rate": 8.018018018018018e-06,
"loss": 3.2969,
"step": 89
},
{
"epoch": 0.81,
"learning_rate": 8.108108108108109e-06,
"loss": 3.2188,
"step": 90
},
{
"epoch": 0.82,
"learning_rate": 8.198198198198198e-06,
"loss": 3.2324,
"step": 91
},
{
"epoch": 0.83,
"learning_rate": 8.288288288288289e-06,
"loss": 3.3359,
"step": 92
},
{
"epoch": 0.84,
"learning_rate": 8.378378378378378e-06,
"loss": 3.2695,
"step": 93
},
{
"epoch": 0.85,
"learning_rate": 8.46846846846847e-06,
"loss": 3.3145,
"step": 94
},
{
"epoch": 0.86,
"learning_rate": 8.55855855855856e-06,
"loss": 3.5645,
"step": 95
},
{
"epoch": 0.86,
"learning_rate": 8.64864864864865e-06,
"loss": 3.3711,
"step": 96
},
{
"epoch": 0.87,
"learning_rate": 8.738738738738739e-06,
"loss": 3.2266,
"step": 97
},
{
"epoch": 0.88,
"learning_rate": 8.82882882882883e-06,
"loss": 3.3984,
"step": 98
},
{
"epoch": 0.89,
"learning_rate": 8.91891891891892e-06,
"loss": 3.4766,
"step": 99
},
{
"epoch": 0.9,
"learning_rate": 9.00900900900901e-06,
"loss": 3.4102,
"step": 100
},
{
"epoch": 0.91,
"learning_rate": 9.0990990990991e-06,
"loss": 3.2891,
"step": 101
},
{
"epoch": 0.92,
"learning_rate": 9.189189189189191e-06,
"loss": 3.3633,
"step": 102
},
{
"epoch": 0.93,
"learning_rate": 9.27927927927928e-06,
"loss": 3.5332,
"step": 103
},
{
"epoch": 0.94,
"learning_rate": 9.36936936936937e-06,
"loss": 3.4414,
"step": 104
},
{
"epoch": 0.95,
"learning_rate": 9.45945945945946e-06,
"loss": 3.3867,
"step": 105
},
{
"epoch": 0.95,
"learning_rate": 9.54954954954955e-06,
"loss": 3.1797,
"step": 106
},
{
"epoch": 0.96,
"learning_rate": 9.63963963963964e-06,
"loss": 3.1895,
"step": 107
},
{
"epoch": 0.97,
"learning_rate": 9.729729729729732e-06,
"loss": 3.1465,
"step": 108
},
{
"epoch": 0.98,
"learning_rate": 9.81981981981982e-06,
"loss": 3.1055,
"step": 109
},
{
"epoch": 0.99,
"learning_rate": 9.90990990990991e-06,
"loss": 3.3828,
"step": 110
},
{
"epoch": 1.0,
"learning_rate": 1e-05,
"loss": 3.3965,
"step": 111
},
{
"epoch": 1.01,
"learning_rate": 9.989989989989992e-06,
"loss": 3.2383,
"step": 112
},
{
"epoch": 1.02,
"learning_rate": 9.979979979979981e-06,
"loss": 3.2852,
"step": 113
},
{
"epoch": 1.03,
"learning_rate": 9.96996996996997e-06,
"loss": 3.2422,
"step": 114
},
{
"epoch": 1.04,
"learning_rate": 9.95995995995996e-06,
"loss": 3.3633,
"step": 115
},
{
"epoch": 1.05,
"learning_rate": 9.949949949949951e-06,
"loss": 3.3125,
"step": 116
},
{
"epoch": 1.05,
"learning_rate": 9.93993993993994e-06,
"loss": 3.1836,
"step": 117
},
{
"epoch": 1.06,
"learning_rate": 9.929929929929931e-06,
"loss": 3.3359,
"step": 118
},
{
"epoch": 1.07,
"learning_rate": 9.91991991991992e-06,
"loss": 3.3965,
"step": 119
},
{
"epoch": 1.08,
"learning_rate": 9.90990990990991e-06,
"loss": 3.3027,
"step": 120
},
{
"epoch": 1.09,
"learning_rate": 9.899899899899901e-06,
"loss": 3.1836,
"step": 121
},
{
"epoch": 1.1,
"learning_rate": 9.88988988988989e-06,
"loss": 3.0918,
"step": 122
},
{
"epoch": 1.11,
"learning_rate": 9.879879879879881e-06,
"loss": 3.2734,
"step": 123
},
{
"epoch": 1.12,
"learning_rate": 9.86986986986987e-06,
"loss": 3.3652,
"step": 124
},
{
"epoch": 1.13,
"learning_rate": 9.85985985985986e-06,
"loss": 3.3438,
"step": 125
},
{
"epoch": 1.14,
"learning_rate": 9.849849849849851e-06,
"loss": 3.1211,
"step": 126
},
{
"epoch": 1.14,
"learning_rate": 9.83983983983984e-06,
"loss": 3.2676,
"step": 127
},
{
"epoch": 1.15,
"learning_rate": 9.829829829829831e-06,
"loss": 3.2148,
"step": 128
},
{
"epoch": 1.16,
"learning_rate": 9.81981981981982e-06,
"loss": 3.3438,
"step": 129
},
{
"epoch": 1.17,
"learning_rate": 9.80980980980981e-06,
"loss": 3.2188,
"step": 130
},
{
"epoch": 1.18,
"learning_rate": 9.799799799799801e-06,
"loss": 3.1855,
"step": 131
},
{
"epoch": 1.19,
"learning_rate": 9.78978978978979e-06,
"loss": 3.3984,
"step": 132
},
{
"epoch": 1.2,
"learning_rate": 9.779779779779781e-06,
"loss": 3.3008,
"step": 133
},
{
"epoch": 1.21,
"learning_rate": 9.76976976976977e-06,
"loss": 3.3359,
"step": 134
},
{
"epoch": 1.22,
"learning_rate": 9.75975975975976e-06,
"loss": 3.0293,
"step": 135
},
{
"epoch": 1.23,
"learning_rate": 9.749749749749751e-06,
"loss": 3.2227,
"step": 136
},
{
"epoch": 1.23,
"learning_rate": 9.73973973973974e-06,
"loss": 3.4922,
"step": 137
},
{
"epoch": 1.24,
"learning_rate": 9.729729729729732e-06,
"loss": 3.2969,
"step": 138
},
{
"epoch": 1.25,
"learning_rate": 9.719719719719721e-06,
"loss": 3.4707,
"step": 139
},
{
"epoch": 1.26,
"learning_rate": 9.70970970970971e-06,
"loss": 3.2109,
"step": 140
},
{
"epoch": 1.27,
"learning_rate": 9.699699699699701e-06,
"loss": 3.1641,
"step": 141
},
{
"epoch": 1.28,
"learning_rate": 9.68968968968969e-06,
"loss": 3.3281,
"step": 142
},
{
"epoch": 1.29,
"learning_rate": 9.67967967967968e-06,
"loss": 3.375,
"step": 143
},
{
"epoch": 1.3,
"learning_rate": 9.669669669669671e-06,
"loss": 3.3828,
"step": 144
},
{
"epoch": 1.31,
"learning_rate": 9.65965965965966e-06,
"loss": 3.2266,
"step": 145
},
{
"epoch": 1.32,
"learning_rate": 9.649649649649651e-06,
"loss": 3.0215,
"step": 146
},
{
"epoch": 1.32,
"learning_rate": 9.63963963963964e-06,
"loss": 3.1562,
"step": 147
},
{
"epoch": 1.33,
"learning_rate": 9.62962962962963e-06,
"loss": 3.3066,
"step": 148
},
{
"epoch": 1.34,
"learning_rate": 9.61961961961962e-06,
"loss": 3.0723,
"step": 149
},
{
"epoch": 1.35,
"learning_rate": 9.60960960960961e-06,
"loss": 3.5586,
"step": 150
},
{
"epoch": 1.36,
"learning_rate": 9.5995995995996e-06,
"loss": 3.3164,
"step": 151
},
{
"epoch": 1.37,
"learning_rate": 9.58958958958959e-06,
"loss": 3.0547,
"step": 152
},
{
"epoch": 1.38,
"learning_rate": 9.57957957957958e-06,
"loss": 3.2402,
"step": 153
},
{
"epoch": 1.39,
"learning_rate": 9.56956956956957e-06,
"loss": 3.1602,
"step": 154
},
{
"epoch": 1.4,
"learning_rate": 9.55955955955956e-06,
"loss": 3.3848,
"step": 155
},
{
"epoch": 1.41,
"learning_rate": 9.54954954954955e-06,
"loss": 3.4629,
"step": 156
},
{
"epoch": 1.41,
"learning_rate": 9.53953953953954e-06,
"loss": 3.4082,
"step": 157
},
{
"epoch": 1.42,
"learning_rate": 9.52952952952953e-06,
"loss": 3.5,
"step": 158
},
{
"epoch": 1.43,
"learning_rate": 9.51951951951952e-06,
"loss": 3.3086,
"step": 159
},
{
"epoch": 1.44,
"learning_rate": 9.50950950950951e-06,
"loss": 3.168,
"step": 160
},
{
"epoch": 1.45,
"learning_rate": 9.4994994994995e-06,
"loss": 3.5,
"step": 161
},
{
"epoch": 1.46,
"learning_rate": 9.489489489489491e-06,
"loss": 3.2305,
"step": 162
},
{
"epoch": 1.47,
"learning_rate": 9.47947947947948e-06,
"loss": 3.3086,
"step": 163
},
{
"epoch": 1.48,
"learning_rate": 9.46946946946947e-06,
"loss": 3.2109,
"step": 164
},
{
"epoch": 1.49,
"learning_rate": 9.45945945945946e-06,
"loss": 3.2402,
"step": 165
},
{
"epoch": 1.5,
"learning_rate": 9.44944944944945e-06,
"loss": 3.0352,
"step": 166
},
{
"epoch": 1.5,
"learning_rate": 9.439439439439441e-06,
"loss": 3.3848,
"step": 167
},
{
"epoch": 1.51,
"learning_rate": 9.42942942942943e-06,
"loss": 3.1465,
"step": 168
},
{
"epoch": 1.52,
"learning_rate": 9.41941941941942e-06,
"loss": 3.1562,
"step": 169
},
{
"epoch": 1.53,
"learning_rate": 9.40940940940941e-06,
"loss": 3.1055,
"step": 170
},
{
"epoch": 1.54,
"learning_rate": 9.3993993993994e-06,
"loss": 3.2754,
"step": 171
},
{
"epoch": 1.55,
"learning_rate": 9.389389389389391e-06,
"loss": 3.1172,
"step": 172
},
{
"epoch": 1.56,
"learning_rate": 9.37937937937938e-06,
"loss": 3.4648,
"step": 173
},
{
"epoch": 1.57,
"learning_rate": 9.36936936936937e-06,
"loss": 3.375,
"step": 174
},
{
"epoch": 1.58,
"learning_rate": 9.35935935935936e-06,
"loss": 3.1055,
"step": 175
},
{
"epoch": 1.59,
"learning_rate": 9.34934934934935e-06,
"loss": 3.25,
"step": 176
},
{
"epoch": 1.59,
"learning_rate": 9.339339339339341e-06,
"loss": 3.1523,
"step": 177
},
{
"epoch": 1.6,
"learning_rate": 9.32932932932933e-06,
"loss": 3.3496,
"step": 178
},
{
"epoch": 1.61,
"learning_rate": 9.31931931931932e-06,
"loss": 2.9512,
"step": 179
},
{
"epoch": 1.62,
"learning_rate": 9.30930930930931e-06,
"loss": 3.1992,
"step": 180
},
{
"epoch": 1.63,
"learning_rate": 9.2992992992993e-06,
"loss": 3.3379,
"step": 181
},
{
"epoch": 1.64,
"learning_rate": 9.289289289289291e-06,
"loss": 3.2539,
"step": 182
},
{
"epoch": 1.65,
"learning_rate": 9.27927927927928e-06,
"loss": 2.9297,
"step": 183
},
{
"epoch": 1.66,
"learning_rate": 9.26926926926927e-06,
"loss": 2.8711,
"step": 184
},
{
"epoch": 1.67,
"learning_rate": 9.25925925925926e-06,
"loss": 3.1387,
"step": 185
},
{
"epoch": 1.68,
"learning_rate": 9.24924924924925e-06,
"loss": 3.1953,
"step": 186
},
{
"epoch": 1.68,
"learning_rate": 9.239239239239241e-06,
"loss": 3.207,
"step": 187
},
{
"epoch": 1.69,
"learning_rate": 9.229229229229229e-06,
"loss": 3.1113,
"step": 188
},
{
"epoch": 1.7,
"learning_rate": 9.21921921921922e-06,
"loss": 2.9727,
"step": 189
},
{
"epoch": 1.71,
"learning_rate": 9.20920920920921e-06,
"loss": 3.0254,
"step": 190
},
{
"epoch": 1.72,
"learning_rate": 9.1991991991992e-06,
"loss": 3.1191,
"step": 191
},
{
"epoch": 1.73,
"learning_rate": 9.189189189189191e-06,
"loss": 3.3027,
"step": 192
},
{
"epoch": 1.74,
"learning_rate": 9.179179179179179e-06,
"loss": 3.2383,
"step": 193
},
{
"epoch": 1.75,
"learning_rate": 9.16916916916917e-06,
"loss": 3.4258,
"step": 194
},
{
"epoch": 1.76,
"learning_rate": 9.15915915915916e-06,
"loss": 3.0527,
"step": 195
},
{
"epoch": 1.77,
"learning_rate": 9.14914914914915e-06,
"loss": 3.3945,
"step": 196
},
{
"epoch": 1.77,
"learning_rate": 9.13913913913914e-06,
"loss": 3.1211,
"step": 197
},
{
"epoch": 1.78,
"learning_rate": 9.129129129129129e-06,
"loss": 3.418,
"step": 198
},
{
"epoch": 1.79,
"learning_rate": 9.11911911911912e-06,
"loss": 3.1328,
"step": 199
},
{
"epoch": 1.8,
"learning_rate": 9.10910910910911e-06,
"loss": 3.0742,
"step": 200
},
{
"epoch": 1.81,
"learning_rate": 9.0990990990991e-06,
"loss": 3.4512,
"step": 201
},
{
"epoch": 1.82,
"learning_rate": 9.08908908908909e-06,
"loss": 3.4551,
"step": 202
},
{
"epoch": 1.83,
"learning_rate": 9.079079079079079e-06,
"loss": 3.0996,
"step": 203
},
{
"epoch": 1.84,
"learning_rate": 9.06906906906907e-06,
"loss": 3.2734,
"step": 204
},
{
"epoch": 1.85,
"learning_rate": 9.05905905905906e-06,
"loss": 3.1895,
"step": 205
},
{
"epoch": 1.86,
"learning_rate": 9.04904904904905e-06,
"loss": 3.293,
"step": 206
},
{
"epoch": 1.86,
"learning_rate": 9.03903903903904e-06,
"loss": 3.2832,
"step": 207
},
{
"epoch": 1.87,
"learning_rate": 9.029029029029029e-06,
"loss": 3.3047,
"step": 208
},
{
"epoch": 1.88,
"learning_rate": 9.01901901901902e-06,
"loss": 3.2656,
"step": 209
},
{
"epoch": 1.89,
"learning_rate": 9.00900900900901e-06,
"loss": 3.3281,
"step": 210
},
{
"epoch": 1.9,
"learning_rate": 8.998998998999e-06,
"loss": 3.0312,
"step": 211
},
{
"epoch": 1.91,
"learning_rate": 8.98898898898899e-06,
"loss": 3.2559,
"step": 212
},
{
"epoch": 1.92,
"learning_rate": 8.97897897897898e-06,
"loss": 3.3125,
"step": 213
},
{
"epoch": 1.93,
"learning_rate": 8.96896896896897e-06,
"loss": 3.1484,
"step": 214
},
{
"epoch": 1.94,
"learning_rate": 8.95895895895896e-06,
"loss": 3.3145,
"step": 215
},
{
"epoch": 1.95,
"learning_rate": 8.94894894894895e-06,
"loss": 3.291,
"step": 216
},
{
"epoch": 1.95,
"learning_rate": 8.93893893893894e-06,
"loss": 3.1992,
"step": 217
},
{
"epoch": 1.96,
"learning_rate": 8.92892892892893e-06,
"loss": 3.2559,
"step": 218
},
{
"epoch": 1.97,
"learning_rate": 8.91891891891892e-06,
"loss": 3.1895,
"step": 219
},
{
"epoch": 1.98,
"learning_rate": 8.90890890890891e-06,
"loss": 3.1641,
"step": 220
},
{
"epoch": 1.99,
"learning_rate": 8.8988988988989e-06,
"loss": 3.3027,
"step": 221
},
{
"epoch": 2.0,
"learning_rate": 8.888888888888888e-06,
"loss": 3.1016,
"step": 222
},
{
"epoch": 2.01,
"learning_rate": 8.87887887887888e-06,
"loss": 3.1797,
"step": 223
},
{
"epoch": 2.02,
"learning_rate": 8.86886886886887e-06,
"loss": 2.957,
"step": 224
},
{
"epoch": 2.03,
"learning_rate": 8.85885885885886e-06,
"loss": 3.0566,
"step": 225
},
{
"epoch": 2.04,
"learning_rate": 8.84884884884885e-06,
"loss": 3.082,
"step": 226
},
{
"epoch": 2.05,
"learning_rate": 8.838838838838838e-06,
"loss": 3.2871,
"step": 227
},
{
"epoch": 2.05,
"learning_rate": 8.82882882882883e-06,
"loss": 3.3027,
"step": 228
},
{
"epoch": 2.06,
"learning_rate": 8.818818818818819e-06,
"loss": 3.2148,
"step": 229
},
{
"epoch": 2.07,
"learning_rate": 8.80880880880881e-06,
"loss": 3.4688,
"step": 230
},
{
"epoch": 2.08,
"learning_rate": 8.798798798798799e-06,
"loss": 3.1641,
"step": 231
},
{
"epoch": 2.09,
"learning_rate": 8.788788788788788e-06,
"loss": 3.2812,
"step": 232
},
{
"epoch": 2.1,
"learning_rate": 8.77877877877878e-06,
"loss": 3.4238,
"step": 233
},
{
"epoch": 2.11,
"learning_rate": 8.768768768768769e-06,
"loss": 3.3242,
"step": 234
},
{
"epoch": 2.12,
"learning_rate": 8.75875875875876e-06,
"loss": 3.1016,
"step": 235
},
{
"epoch": 2.13,
"learning_rate": 8.74874874874875e-06,
"loss": 3.2617,
"step": 236
},
{
"epoch": 2.14,
"learning_rate": 8.738738738738739e-06,
"loss": 2.9805,
"step": 237
},
{
"epoch": 2.14,
"learning_rate": 8.72872872872873e-06,
"loss": 3.002,
"step": 238
},
{
"epoch": 2.15,
"learning_rate": 8.718718718718719e-06,
"loss": 2.9922,
"step": 239
},
{
"epoch": 2.16,
"learning_rate": 8.70870870870871e-06,
"loss": 3.2656,
"step": 240
},
{
"epoch": 2.17,
"learning_rate": 8.6986986986987e-06,
"loss": 3.4043,
"step": 241
},
{
"epoch": 2.18,
"learning_rate": 8.688688688688689e-06,
"loss": 2.9336,
"step": 242
},
{
"epoch": 2.19,
"learning_rate": 8.67867867867868e-06,
"loss": 2.8555,
"step": 243
},
{
"epoch": 2.2,
"learning_rate": 8.668668668668669e-06,
"loss": 3.1094,
"step": 244
},
{
"epoch": 2.21,
"learning_rate": 8.65865865865866e-06,
"loss": 3.1973,
"step": 245
},
{
"epoch": 2.22,
"learning_rate": 8.64864864864865e-06,
"loss": 3.4434,
"step": 246
},
{
"epoch": 2.23,
"learning_rate": 8.638638638638639e-06,
"loss": 3.2012,
"step": 247
},
{
"epoch": 2.23,
"learning_rate": 8.62862862862863e-06,
"loss": 3.0605,
"step": 248
},
{
"epoch": 2.24,
"learning_rate": 8.618618618618619e-06,
"loss": 3.3398,
"step": 249
},
{
"epoch": 2.25,
"learning_rate": 8.60860860860861e-06,
"loss": 3.3027,
"step": 250
},
{
"epoch": 2.26,
"learning_rate": 8.5985985985986e-06,
"loss": 2.9316,
"step": 251
},
{
"epoch": 2.27,
"learning_rate": 8.588588588588589e-06,
"loss": 3.3477,
"step": 252
},
{
"epoch": 2.28,
"learning_rate": 8.57857857857858e-06,
"loss": 3.1055,
"step": 253
},
{
"epoch": 2.29,
"learning_rate": 8.568568568568569e-06,
"loss": 3.1035,
"step": 254
},
{
"epoch": 2.3,
"learning_rate": 8.55855855855856e-06,
"loss": 3.002,
"step": 255
},
{
"epoch": 2.31,
"learning_rate": 8.54854854854855e-06,
"loss": 3.0449,
"step": 256
},
{
"epoch": 2.32,
"learning_rate": 8.538538538538539e-06,
"loss": 3.3125,
"step": 257
},
{
"epoch": 2.32,
"learning_rate": 8.52852852852853e-06,
"loss": 2.9414,
"step": 258
},
{
"epoch": 2.33,
"learning_rate": 8.518518518518519e-06,
"loss": 3.3281,
"step": 259
},
{
"epoch": 2.34,
"learning_rate": 8.50850850850851e-06,
"loss": 3.0352,
"step": 260
},
{
"epoch": 2.35,
"learning_rate": 8.4984984984985e-06,
"loss": 3.1133,
"step": 261
},
{
"epoch": 2.36,
"learning_rate": 8.488488488488489e-06,
"loss": 3.1934,
"step": 262
},
{
"epoch": 2.37,
"learning_rate": 8.47847847847848e-06,
"loss": 3.0293,
"step": 263
},
{
"epoch": 2.38,
"learning_rate": 8.46846846846847e-06,
"loss": 3.2734,
"step": 264
},
{
"epoch": 2.39,
"learning_rate": 8.45845845845846e-06,
"loss": 3.4219,
"step": 265
},
{
"epoch": 2.4,
"learning_rate": 8.44844844844845e-06,
"loss": 3.2207,
"step": 266
},
{
"epoch": 2.41,
"learning_rate": 8.438438438438439e-06,
"loss": 3.1758,
"step": 267
},
{
"epoch": 2.41,
"learning_rate": 8.428428428428428e-06,
"loss": 3.1934,
"step": 268
},
{
"epoch": 2.42,
"learning_rate": 8.41841841841842e-06,
"loss": 3.0781,
"step": 269
},
{
"epoch": 2.43,
"learning_rate": 8.408408408408409e-06,
"loss": 3.0215,
"step": 270
},
{
"epoch": 2.44,
"learning_rate": 8.398398398398398e-06,
"loss": 3.2617,
"step": 271
},
{
"epoch": 2.45,
"learning_rate": 8.388388388388389e-06,
"loss": 3.1914,
"step": 272
},
{
"epoch": 2.46,
"learning_rate": 8.378378378378378e-06,
"loss": 3.3145,
"step": 273
},
{
"epoch": 2.47,
"learning_rate": 8.36836836836837e-06,
"loss": 3.2812,
"step": 274
},
{
"epoch": 2.48,
"learning_rate": 8.358358358358359e-06,
"loss": 3.4551,
"step": 275
},
{
"epoch": 2.49,
"learning_rate": 8.348348348348348e-06,
"loss": 3.0703,
"step": 276
},
{
"epoch": 2.5,
"learning_rate": 8.338338338338339e-06,
"loss": 3.0625,
"step": 277
},
{
"epoch": 2.5,
"learning_rate": 8.328328328328328e-06,
"loss": 2.8613,
"step": 278
},
{
"epoch": 2.51,
"learning_rate": 8.31831831831832e-06,
"loss": 2.9824,
"step": 279
},
{
"epoch": 2.52,
"learning_rate": 8.308308308308309e-06,
"loss": 3.1367,
"step": 280
},
{
"epoch": 2.53,
"learning_rate": 8.298298298298298e-06,
"loss": 3.2188,
"step": 281
},
{
"epoch": 2.54,
"learning_rate": 8.288288288288289e-06,
"loss": 3.2422,
"step": 282
},
{
"epoch": 2.55,
"learning_rate": 8.278278278278278e-06,
"loss": 3.0156,
"step": 283
},
{
"epoch": 2.56,
"learning_rate": 8.26826826826827e-06,
"loss": 3.1953,
"step": 284
},
{
"epoch": 2.57,
"learning_rate": 8.258258258258259e-06,
"loss": 2.916,
"step": 285
},
{
"epoch": 2.58,
"learning_rate": 8.248248248248248e-06,
"loss": 3.043,
"step": 286
},
{
"epoch": 2.59,
"learning_rate": 8.23823823823824e-06,
"loss": 3.0586,
"step": 287
},
{
"epoch": 2.59,
"learning_rate": 8.228228228228229e-06,
"loss": 3.3281,
"step": 288
},
{
"epoch": 2.6,
"learning_rate": 8.21821821821822e-06,
"loss": 3.2227,
"step": 289
},
{
"epoch": 2.61,
"learning_rate": 8.208208208208209e-06,
"loss": 3.2539,
"step": 290
},
{
"epoch": 2.62,
"learning_rate": 8.198198198198198e-06,
"loss": 3.2715,
"step": 291
},
{
"epoch": 2.63,
"learning_rate": 8.18818818818819e-06,
"loss": 3.1289,
"step": 292
},
{
"epoch": 2.64,
"learning_rate": 8.178178178178179e-06,
"loss": 2.9277,
"step": 293
},
{
"epoch": 2.65,
"learning_rate": 8.16816816816817e-06,
"loss": 3.0586,
"step": 294
},
{
"epoch": 2.66,
"learning_rate": 8.158158158158159e-06,
"loss": 3.0352,
"step": 295
},
{
"epoch": 2.67,
"learning_rate": 8.148148148148148e-06,
"loss": 3.3477,
"step": 296
},
{
"epoch": 2.68,
"learning_rate": 8.13813813813814e-06,
"loss": 3.3613,
"step": 297
},
{
"epoch": 2.68,
"learning_rate": 8.128128128128129e-06,
"loss": 3.1719,
"step": 298
},
{
"epoch": 2.69,
"learning_rate": 8.11811811811812e-06,
"loss": 3.3242,
"step": 299
},
{
"epoch": 2.7,
"learning_rate": 8.108108108108109e-06,
"loss": 2.8281,
"step": 300
},
{
"epoch": 2.71,
"learning_rate": 8.098098098098098e-06,
"loss": 3.168,
"step": 301
},
{
"epoch": 2.72,
"learning_rate": 8.088088088088088e-06,
"loss": 3.1602,
"step": 302
},
{
"epoch": 2.73,
"learning_rate": 8.078078078078079e-06,
"loss": 3.4062,
"step": 303
},
{
"epoch": 2.74,
"learning_rate": 8.06806806806807e-06,
"loss": 3.1641,
"step": 304
},
{
"epoch": 2.75,
"learning_rate": 8.058058058058059e-06,
"loss": 3.2324,
"step": 305
},
{
"epoch": 2.76,
"learning_rate": 8.048048048048048e-06,
"loss": 3.1191,
"step": 306
},
{
"epoch": 2.77,
"learning_rate": 8.038038038038038e-06,
"loss": 3.2559,
"step": 307
},
{
"epoch": 2.77,
"learning_rate": 8.028028028028029e-06,
"loss": 3.332,
"step": 308
},
{
"epoch": 2.78,
"learning_rate": 8.018018018018018e-06,
"loss": 3.2441,
"step": 309
},
{
"epoch": 2.79,
"learning_rate": 8.00800800800801e-06,
"loss": 3.0117,
"step": 310
},
{
"epoch": 2.8,
"learning_rate": 7.997997997997999e-06,
"loss": 3.2109,
"step": 311
},
{
"epoch": 2.81,
"learning_rate": 7.987987987987988e-06,
"loss": 3.1406,
"step": 312
},
{
"epoch": 2.82,
"learning_rate": 7.977977977977979e-06,
"loss": 3.2305,
"step": 313
},
{
"epoch": 2.83,
"learning_rate": 7.967967967967968e-06,
"loss": 2.9121,
"step": 314
},
{
"epoch": 2.84,
"learning_rate": 7.95795795795796e-06,
"loss": 2.9961,
"step": 315
},
{
"epoch": 2.85,
"learning_rate": 7.947947947947949e-06,
"loss": 2.9883,
"step": 316
},
{
"epoch": 2.86,
"learning_rate": 7.937937937937938e-06,
"loss": 3.0996,
"step": 317
},
{
"epoch": 2.86,
"learning_rate": 7.927927927927929e-06,
"loss": 3.2441,
"step": 318
},
{
"epoch": 2.87,
"learning_rate": 7.917917917917918e-06,
"loss": 3.2656,
"step": 319
},
{
"epoch": 2.88,
"learning_rate": 7.90790790790791e-06,
"loss": 3.2207,
"step": 320
},
{
"epoch": 2.89,
"learning_rate": 7.897897897897899e-06,
"loss": 3.1523,
"step": 321
},
{
"epoch": 2.9,
"learning_rate": 7.887887887887888e-06,
"loss": 3.3086,
"step": 322
},
{
"epoch": 2.91,
"learning_rate": 7.877877877877879e-06,
"loss": 3.1934,
"step": 323
},
{
"epoch": 2.92,
"learning_rate": 7.867867867867868e-06,
"loss": 3.2539,
"step": 324
},
{
"epoch": 2.93,
"learning_rate": 7.85785785785786e-06,
"loss": 3.4473,
"step": 325
},
{
"epoch": 2.94,
"learning_rate": 7.847847847847849e-06,
"loss": 3.0957,
"step": 326
},
{
"epoch": 2.95,
"learning_rate": 7.837837837837838e-06,
"loss": 3.1953,
"step": 327
},
{
"epoch": 2.95,
"learning_rate": 7.827827827827829e-06,
"loss": 3.1895,
"step": 328
},
{
"epoch": 2.96,
"learning_rate": 7.817817817817818e-06,
"loss": 3.3066,
"step": 329
},
{
"epoch": 2.97,
"learning_rate": 7.807807807807808e-06,
"loss": 3.1152,
"step": 330
},
{
"epoch": 2.98,
"learning_rate": 7.797797797797799e-06,
"loss": 3.1914,
"step": 331
},
{
"epoch": 2.99,
"learning_rate": 7.787787787787788e-06,
"loss": 3.1973,
"step": 332
},
{
"epoch": 3.0,
"learning_rate": 7.77777777777778e-06,
"loss": 3.1094,
"step": 333
},
{
"epoch": 3.01,
"learning_rate": 7.767767767767769e-06,
"loss": 3.1133,
"step": 334
},
{
"epoch": 3.02,
"learning_rate": 7.757757757757758e-06,
"loss": 3.3789,
"step": 335
},
{
"epoch": 3.03,
"learning_rate": 7.747747747747749e-06,
"loss": 3.0156,
"step": 336
},
{
"epoch": 3.04,
"learning_rate": 7.737737737737738e-06,
"loss": 2.9746,
"step": 337
},
{
"epoch": 3.05,
"learning_rate": 7.72772772772773e-06,
"loss": 3.0391,
"step": 338
},
{
"epoch": 3.05,
"learning_rate": 7.717717717717719e-06,
"loss": 3.2461,
"step": 339
},
{
"epoch": 3.06,
"learning_rate": 7.707707707707708e-06,
"loss": 3.1133,
"step": 340
},
{
"epoch": 3.07,
"learning_rate": 7.697697697697697e-06,
"loss": 3.2812,
"step": 341
},
{
"epoch": 3.08,
"learning_rate": 7.687687687687688e-06,
"loss": 3.2598,
"step": 342
},
{
"epoch": 3.09,
"learning_rate": 7.67767767767768e-06,
"loss": 3.2266,
"step": 343
},
{
"epoch": 3.1,
"learning_rate": 7.667667667667669e-06,
"loss": 3.1738,
"step": 344
},
{
"epoch": 3.11,
"learning_rate": 7.657657657657658e-06,
"loss": 3.2383,
"step": 345
},
{
"epoch": 3.12,
"learning_rate": 7.647647647647647e-06,
"loss": 3.2227,
"step": 346
},
{
"epoch": 3.13,
"learning_rate": 7.637637637637638e-06,
"loss": 3.0195,
"step": 347
},
{
"epoch": 3.14,
"learning_rate": 7.6276276276276285e-06,
"loss": 3.1641,
"step": 348
},
{
"epoch": 3.14,
"learning_rate": 7.617617617617619e-06,
"loss": 2.9746,
"step": 349
},
{
"epoch": 3.15,
"learning_rate": 7.607607607607608e-06,
"loss": 3.0508,
"step": 350
},
{
"epoch": 3.16,
"learning_rate": 7.597597597597598e-06,
"loss": 3.0527,
"step": 351
},
{
"epoch": 3.17,
"learning_rate": 7.587587587587588e-06,
"loss": 3.0527,
"step": 352
},
{
"epoch": 3.18,
"learning_rate": 7.577577577577579e-06,
"loss": 3.498,
"step": 353
},
{
"epoch": 3.19,
"learning_rate": 7.567567567567569e-06,
"loss": 3.1328,
"step": 354
},
{
"epoch": 3.2,
"learning_rate": 7.557557557557558e-06,
"loss": 3.0762,
"step": 355
},
{
"epoch": 3.21,
"learning_rate": 7.547547547547548e-06,
"loss": 3.1875,
"step": 356
},
{
"epoch": 3.22,
"learning_rate": 7.5375375375375385e-06,
"loss": 3.2109,
"step": 357
},
{
"epoch": 3.23,
"learning_rate": 7.527527527527529e-06,
"loss": 2.8652,
"step": 358
},
{
"epoch": 3.23,
"learning_rate": 7.517517517517519e-06,
"loss": 3.2012,
"step": 359
},
{
"epoch": 3.24,
"learning_rate": 7.507507507507507e-06,
"loss": 3.2734,
"step": 360
},
{
"epoch": 3.25,
"learning_rate": 7.4974974974974975e-06,
"loss": 3.252,
"step": 361
},
{
"epoch": 3.26,
"learning_rate": 7.487487487487488e-06,
"loss": 2.957,
"step": 362
},
{
"epoch": 3.27,
"learning_rate": 7.477477477477479e-06,
"loss": 3.375,
"step": 363
},
{
"epoch": 3.28,
"learning_rate": 7.467467467467469e-06,
"loss": 2.9961,
"step": 364
},
{
"epoch": 3.29,
"learning_rate": 7.457457457457457e-06,
"loss": 3.0312,
"step": 365
},
{
"epoch": 3.3,
"learning_rate": 7.447447447447448e-06,
"loss": 3.0957,
"step": 366
},
{
"epoch": 3.31,
"learning_rate": 7.437437437437438e-06,
"loss": 3.2051,
"step": 367
},
{
"epoch": 3.32,
"learning_rate": 7.427427427427428e-06,
"loss": 3.0449,
"step": 368
},
{
"epoch": 3.32,
"learning_rate": 7.417417417417418e-06,
"loss": 3.1309,
"step": 369
},
{
"epoch": 3.33,
"learning_rate": 7.4074074074074075e-06,
"loss": 3.1523,
"step": 370
},
{
"epoch": 3.34,
"learning_rate": 7.397397397397398e-06,
"loss": 3.0391,
"step": 371
},
{
"epoch": 3.35,
"learning_rate": 7.387387387387388e-06,
"loss": 3.1406,
"step": 372
},
{
"epoch": 3.36,
"learning_rate": 7.377377377377378e-06,
"loss": 3.1719,
"step": 373
},
{
"epoch": 3.37,
"learning_rate": 7.367367367367368e-06,
"loss": 3.0625,
"step": 374
},
{
"epoch": 3.38,
"learning_rate": 7.3573573573573575e-06,
"loss": 3.2812,
"step": 375
},
{
"epoch": 3.39,
"learning_rate": 7.347347347347348e-06,
"loss": 3.0137,
"step": 376
},
{
"epoch": 3.4,
"learning_rate": 7.337337337337338e-06,
"loss": 3.1211,
"step": 377
},
{
"epoch": 3.41,
"learning_rate": 7.327327327327328e-06,
"loss": 3.3184,
"step": 378
},
{
"epoch": 3.41,
"learning_rate": 7.317317317317318e-06,
"loss": 3.0957,
"step": 379
},
{
"epoch": 3.42,
"learning_rate": 7.307307307307308e-06,
"loss": 3.3438,
"step": 380
},
{
"epoch": 3.43,
"learning_rate": 7.297297297297298e-06,
"loss": 3.4336,
"step": 381
},
{
"epoch": 3.44,
"learning_rate": 7.287287287287288e-06,
"loss": 3.3379,
"step": 382
},
{
"epoch": 3.45,
"learning_rate": 7.277277277277278e-06,
"loss": 3.0938,
"step": 383
},
{
"epoch": 3.46,
"learning_rate": 7.267267267267268e-06,
"loss": 3.0234,
"step": 384
},
{
"epoch": 3.47,
"learning_rate": 7.257257257257258e-06,
"loss": 3.0938,
"step": 385
},
{
"epoch": 3.48,
"learning_rate": 7.247247247247248e-06,
"loss": 3.1094,
"step": 386
},
{
"epoch": 3.49,
"learning_rate": 7.237237237237238e-06,
"loss": 3.0977,
"step": 387
},
{
"epoch": 3.5,
"learning_rate": 7.227227227227228e-06,
"loss": 3.2227,
"step": 388
},
{
"epoch": 3.5,
"learning_rate": 7.217217217217218e-06,
"loss": 3.1465,
"step": 389
},
{
"epoch": 3.51,
"learning_rate": 7.207207207207208e-06,
"loss": 3.3027,
"step": 390
},
{
"epoch": 3.52,
"learning_rate": 7.197197197197198e-06,
"loss": 3.3438,
"step": 391
},
{
"epoch": 3.53,
"learning_rate": 7.187187187187188e-06,
"loss": 3.2676,
"step": 392
},
{
"epoch": 3.54,
"learning_rate": 7.177177177177178e-06,
"loss": 3.1328,
"step": 393
},
{
"epoch": 3.55,
"learning_rate": 7.167167167167167e-06,
"loss": 3.2383,
"step": 394
},
{
"epoch": 3.56,
"learning_rate": 7.157157157157158e-06,
"loss": 3.3906,
"step": 395
},
{
"epoch": 3.57,
"learning_rate": 7.147147147147148e-06,
"loss": 3.2539,
"step": 396
},
{
"epoch": 3.58,
"learning_rate": 7.137137137137138e-06,
"loss": 3.1484,
"step": 397
},
{
"epoch": 3.59,
"learning_rate": 7.127127127127128e-06,
"loss": 2.832,
"step": 398
},
{
"epoch": 3.59,
"learning_rate": 7.117117117117117e-06,
"loss": 3.2695,
"step": 399
},
{
"epoch": 3.6,
"learning_rate": 7.107107107107107e-06,
"loss": 3.3613,
"step": 400
},
{
"epoch": 3.61,
"learning_rate": 7.097097097097097e-06,
"loss": 3.4004,
"step": 401
},
{
"epoch": 3.62,
"learning_rate": 7.087087087087087e-06,
"loss": 3.2344,
"step": 402
},
{
"epoch": 3.63,
"learning_rate": 7.0770770770770784e-06,
"loss": 2.9746,
"step": 403
},
{
"epoch": 3.64,
"learning_rate": 7.067067067067067e-06,
"loss": 3.2812,
"step": 404
},
{
"epoch": 3.65,
"learning_rate": 7.057057057057057e-06,
"loss": 3.0879,
"step": 405
},
{
"epoch": 3.66,
"learning_rate": 7.047047047047047e-06,
"loss": 3.1758,
"step": 406
},
{
"epoch": 3.67,
"learning_rate": 7.0370370370370375e-06,
"loss": 3.0566,
"step": 407
},
{
"epoch": 3.68,
"learning_rate": 7.027027027027028e-06,
"loss": 3.2109,
"step": 408
},
{
"epoch": 3.68,
"learning_rate": 7.017017017017017e-06,
"loss": 3.2461,
"step": 409
},
{
"epoch": 3.69,
"learning_rate": 7.007007007007007e-06,
"loss": 3.1328,
"step": 410
},
{
"epoch": 3.7,
"learning_rate": 6.996996996996997e-06,
"loss": 3.1406,
"step": 411
},
{
"epoch": 3.71,
"learning_rate": 6.9869869869869876e-06,
"loss": 3.0781,
"step": 412
},
{
"epoch": 3.72,
"learning_rate": 6.976976976976978e-06,
"loss": 3.0859,
"step": 413
},
{
"epoch": 3.73,
"learning_rate": 6.966966966966967e-06,
"loss": 3.2969,
"step": 414
},
{
"epoch": 3.74,
"learning_rate": 6.956956956956957e-06,
"loss": 2.9551,
"step": 415
},
{
"epoch": 3.75,
"learning_rate": 6.9469469469469474e-06,
"loss": 3.1016,
"step": 416
},
{
"epoch": 3.76,
"learning_rate": 6.936936936936938e-06,
"loss": 3.0957,
"step": 417
},
{
"epoch": 3.77,
"learning_rate": 6.926926926926928e-06,
"loss": 2.9297,
"step": 418
},
{
"epoch": 3.77,
"learning_rate": 6.916916916916917e-06,
"loss": 3.1953,
"step": 419
},
{
"epoch": 3.78,
"learning_rate": 6.906906906906907e-06,
"loss": 3.1582,
"step": 420
},
{
"epoch": 3.79,
"learning_rate": 6.8968968968968975e-06,
"loss": 3.0859,
"step": 421
},
{
"epoch": 3.8,
"learning_rate": 6.886886886886888e-06,
"loss": 3.3613,
"step": 422
},
{
"epoch": 3.81,
"learning_rate": 6.876876876876878e-06,
"loss": 3.2695,
"step": 423
},
{
"epoch": 3.82,
"learning_rate": 6.866866866866867e-06,
"loss": 3.2246,
"step": 424
},
{
"epoch": 3.83,
"learning_rate": 6.856856856856857e-06,
"loss": 2.8027,
"step": 425
},
{
"epoch": 3.84,
"learning_rate": 6.846846846846848e-06,
"loss": 3.2637,
"step": 426
},
{
"epoch": 3.85,
"learning_rate": 6.836836836836838e-06,
"loss": 3.332,
"step": 427
},
{
"epoch": 3.86,
"learning_rate": 6.826826826826828e-06,
"loss": 3.0684,
"step": 428
},
{
"epoch": 3.86,
"learning_rate": 6.816816816816817e-06,
"loss": 2.9375,
"step": 429
},
{
"epoch": 3.87,
"learning_rate": 6.8068068068068075e-06,
"loss": 3.1367,
"step": 430
},
{
"epoch": 3.88,
"learning_rate": 6.796796796796798e-06,
"loss": 3.2246,
"step": 431
},
{
"epoch": 3.89,
"learning_rate": 6.786786786786788e-06,
"loss": 3.0781,
"step": 432
},
{
"epoch": 3.9,
"learning_rate": 6.776776776776778e-06,
"loss": 2.9395,
"step": 433
},
{
"epoch": 3.91,
"learning_rate": 6.7667667667667665e-06,
"loss": 3.082,
"step": 434
},
{
"epoch": 3.92,
"learning_rate": 6.7567567567567575e-06,
"loss": 3.1289,
"step": 435
},
{
"epoch": 3.93,
"learning_rate": 6.746746746746748e-06,
"loss": 3.0469,
"step": 436
},
{
"epoch": 3.94,
"learning_rate": 6.736736736736738e-06,
"loss": 3.1777,
"step": 437
},
{
"epoch": 3.95,
"learning_rate": 6.726726726726728e-06,
"loss": 3.1309,
"step": 438
},
{
"epoch": 3.95,
"learning_rate": 6.716716716716717e-06,
"loss": 3.084,
"step": 439
},
{
"epoch": 3.96,
"learning_rate": 6.706706706706707e-06,
"loss": 3.1855,
"step": 440
},
{
"epoch": 3.97,
"learning_rate": 6.696696696696697e-06,
"loss": 3.1465,
"step": 441
},
{
"epoch": 3.98,
"learning_rate": 6.686686686686687e-06,
"loss": 3.2578,
"step": 442
},
{
"epoch": 3.99,
"learning_rate": 6.676676676676678e-06,
"loss": 3.0371,
"step": 443
},
{
"epoch": 4.0,
"learning_rate": 6.666666666666667e-06,
"loss": 3.0469,
"step": 444
},
{
"epoch": 4.01,
"learning_rate": 6.656656656656657e-06,
"loss": 3.3105,
"step": 445
},
{
"epoch": 4.02,
"learning_rate": 6.646646646646647e-06,
"loss": 3.1523,
"step": 446
},
{
"epoch": 4.03,
"learning_rate": 6.636636636636637e-06,
"loss": 3.3555,
"step": 447
},
{
"epoch": 4.04,
"learning_rate": 6.626626626626627e-06,
"loss": 3.0273,
"step": 448
},
{
"epoch": 4.05,
"learning_rate": 6.616616616616617e-06,
"loss": 3.0293,
"step": 449
},
{
"epoch": 4.05,
"learning_rate": 6.606606606606607e-06,
"loss": 3.3594,
"step": 450
},
{
"epoch": 4.06,
"learning_rate": 6.596596596596597e-06,
"loss": 3.1152,
"step": 451
},
{
"epoch": 4.07,
"learning_rate": 6.586586586586587e-06,
"loss": 3.1797,
"step": 452
},
{
"epoch": 4.08,
"learning_rate": 6.5765765765765775e-06,
"loss": 3.168,
"step": 453
},
{
"epoch": 4.09,
"learning_rate": 6.566566566566567e-06,
"loss": 3.1055,
"step": 454
},
{
"epoch": 4.1,
"learning_rate": 6.556556556556557e-06,
"loss": 3.1797,
"step": 455
},
{
"epoch": 4.11,
"learning_rate": 6.546546546546547e-06,
"loss": 3.0781,
"step": 456
},
{
"epoch": 4.12,
"learning_rate": 6.536536536536537e-06,
"loss": 3.1934,
"step": 457
},
{
"epoch": 4.13,
"learning_rate": 6.526526526526527e-06,
"loss": 3.3496,
"step": 458
},
{
"epoch": 4.14,
"learning_rate": 6.516516516516517e-06,
"loss": 3.0156,
"step": 459
},
{
"epoch": 4.14,
"learning_rate": 6.506506506506507e-06,
"loss": 3.0742,
"step": 460
},
{
"epoch": 4.15,
"learning_rate": 6.496496496496497e-06,
"loss": 3.2363,
"step": 461
},
{
"epoch": 4.16,
"learning_rate": 6.486486486486487e-06,
"loss": 3.1758,
"step": 462
},
{
"epoch": 4.17,
"learning_rate": 6.476476476476477e-06,
"loss": 3.2266,
"step": 463
},
{
"epoch": 4.18,
"learning_rate": 6.466466466466467e-06,
"loss": 2.9395,
"step": 464
},
{
"epoch": 4.19,
"learning_rate": 6.456456456456457e-06,
"loss": 3.373,
"step": 465
},
{
"epoch": 4.2,
"learning_rate": 6.446446446446447e-06,
"loss": 3.0664,
"step": 466
},
{
"epoch": 4.21,
"learning_rate": 6.4364364364364375e-06,
"loss": 3.1133,
"step": 467
},
{
"epoch": 4.22,
"learning_rate": 6.426426426426427e-06,
"loss": 3.0977,
"step": 468
},
{
"epoch": 4.23,
"learning_rate": 6.416416416416417e-06,
"loss": 3.2168,
"step": 469
},
{
"epoch": 4.23,
"learning_rate": 6.406406406406407e-06,
"loss": 3.0371,
"step": 470
},
{
"epoch": 4.24,
"learning_rate": 6.396396396396397e-06,
"loss": 3.3086,
"step": 471
},
{
"epoch": 4.25,
"learning_rate": 6.3863863863863875e-06,
"loss": 3.1328,
"step": 472
},
{
"epoch": 4.26,
"learning_rate": 6.376376376376376e-06,
"loss": 2.9844,
"step": 473
},
{
"epoch": 4.27,
"learning_rate": 6.366366366366366e-06,
"loss": 3.0488,
"step": 474
},
{
"epoch": 4.28,
"learning_rate": 6.356356356356357e-06,
"loss": 3.1504,
"step": 475
},
{
"epoch": 4.29,
"learning_rate": 6.3463463463463474e-06,
"loss": 2.6758,
"step": 476
},
{
"epoch": 4.3,
"learning_rate": 6.336336336336338e-06,
"loss": 3.2285,
"step": 477
},
{
"epoch": 4.31,
"learning_rate": 6.326326326326326e-06,
"loss": 3.1094,
"step": 478
},
{
"epoch": 4.32,
"learning_rate": 6.316316316316316e-06,
"loss": 3.0098,
"step": 479
},
{
"epoch": 4.32,
"learning_rate": 6.3063063063063065e-06,
"loss": 2.9961,
"step": 480
},
{
"epoch": 4.33,
"learning_rate": 6.296296296296297e-06,
"loss": 3.4141,
"step": 481
},
{
"epoch": 4.34,
"learning_rate": 6.286286286286287e-06,
"loss": 3.2988,
"step": 482
},
{
"epoch": 4.35,
"learning_rate": 6.276276276276276e-06,
"loss": 3.2617,
"step": 483
},
{
"epoch": 4.36,
"learning_rate": 6.266266266266266e-06,
"loss": 3.1406,
"step": 484
},
{
"epoch": 4.37,
"learning_rate": 6.2562562562562565e-06,
"loss": 3.125,
"step": 485
},
{
"epoch": 4.38,
"learning_rate": 6.246246246246247e-06,
"loss": 3.2441,
"step": 486
},
{
"epoch": 4.39,
"learning_rate": 6.236236236236237e-06,
"loss": 2.8496,
"step": 487
},
{
"epoch": 4.4,
"learning_rate": 6.226226226226226e-06,
"loss": 3.2441,
"step": 488
},
{
"epoch": 4.41,
"learning_rate": 6.2162162162162164e-06,
"loss": 3.0645,
"step": 489
},
{
"epoch": 4.41,
"learning_rate": 6.206206206206207e-06,
"loss": 3.0938,
"step": 490
},
{
"epoch": 4.42,
"learning_rate": 6.196196196196197e-06,
"loss": 3.0801,
"step": 491
},
{
"epoch": 4.43,
"learning_rate": 6.186186186186187e-06,
"loss": 3.1289,
"step": 492
},
{
"epoch": 4.44,
"learning_rate": 6.176176176176176e-06,
"loss": 3.1992,
"step": 493
},
{
"epoch": 4.45,
"learning_rate": 6.1661661661661665e-06,
"loss": 2.9961,
"step": 494
},
{
"epoch": 4.46,
"learning_rate": 6.156156156156157e-06,
"loss": 3.2559,
"step": 495
},
{
"epoch": 4.47,
"learning_rate": 6.146146146146147e-06,
"loss": 3.1738,
"step": 496
},
{
"epoch": 4.48,
"learning_rate": 6.136136136136137e-06,
"loss": 3.0449,
"step": 497
},
{
"epoch": 4.49,
"learning_rate": 6.126126126126126e-06,
"loss": 3.2246,
"step": 498
},
{
"epoch": 4.5,
"learning_rate": 6.1161161161161166e-06,
"loss": 3.0859,
"step": 499
},
{
"epoch": 4.5,
"learning_rate": 6.106106106106107e-06,
"loss": 3.332,
"step": 500
},
{
"epoch": 4.51,
"learning_rate": 6.096096096096097e-06,
"loss": 3.1387,
"step": 501
},
{
"epoch": 4.52,
"learning_rate": 6.086086086086087e-06,
"loss": 3.1016,
"step": 502
},
{
"epoch": 4.53,
"learning_rate": 6.0760760760760765e-06,
"loss": 3.2598,
"step": 503
},
{
"epoch": 4.54,
"learning_rate": 6.066066066066067e-06,
"loss": 3.1953,
"step": 504
},
{
"epoch": 4.55,
"learning_rate": 6.056056056056057e-06,
"loss": 3.2695,
"step": 505
},
{
"epoch": 4.56,
"learning_rate": 6.046046046046047e-06,
"loss": 3.2383,
"step": 506
},
{
"epoch": 4.57,
"learning_rate": 6.036036036036037e-06,
"loss": 3.25,
"step": 507
},
{
"epoch": 4.58,
"learning_rate": 6.0260260260260265e-06,
"loss": 2.8516,
"step": 508
},
{
"epoch": 4.59,
"learning_rate": 6.016016016016017e-06,
"loss": 3.3828,
"step": 509
},
{
"epoch": 4.59,
"learning_rate": 6.006006006006007e-06,
"loss": 3.0645,
"step": 510
},
{
"epoch": 4.6,
"learning_rate": 5.995995995995997e-06,
"loss": 3.0938,
"step": 511
},
{
"epoch": 4.61,
"learning_rate": 5.985985985985987e-06,
"loss": 3.334,
"step": 512
},
{
"epoch": 4.62,
"learning_rate": 5.975975975975976e-06,
"loss": 2.9648,
"step": 513
},
{
"epoch": 4.63,
"learning_rate": 5.965965965965966e-06,
"loss": 2.9902,
"step": 514
},
{
"epoch": 4.64,
"learning_rate": 5.955955955955957e-06,
"loss": 3.0195,
"step": 515
},
{
"epoch": 4.65,
"learning_rate": 5.945945945945947e-06,
"loss": 3.1465,
"step": 516
},
{
"epoch": 4.66,
"learning_rate": 5.935935935935936e-06,
"loss": 3.1934,
"step": 517
},
{
"epoch": 4.67,
"learning_rate": 5.925925925925926e-06,
"loss": 3.418,
"step": 518
},
{
"epoch": 4.68,
"learning_rate": 5.915915915915916e-06,
"loss": 3.0156,
"step": 519
},
{
"epoch": 4.68,
"learning_rate": 5.905905905905906e-06,
"loss": 3.1914,
"step": 520
},
{
"epoch": 4.69,
"learning_rate": 5.895895895895896e-06,
"loss": 3.2559,
"step": 521
},
{
"epoch": 4.7,
"learning_rate": 5.885885885885886e-06,
"loss": 3.168,
"step": 522
},
{
"epoch": 4.71,
"learning_rate": 5.875875875875876e-06,
"loss": 3.2246,
"step": 523
},
{
"epoch": 4.72,
"learning_rate": 5.865865865865866e-06,
"loss": 3.1094,
"step": 524
},
{
"epoch": 4.73,
"learning_rate": 5.855855855855856e-06,
"loss": 3.2695,
"step": 525
},
{
"epoch": 4.74,
"learning_rate": 5.8458458458458464e-06,
"loss": 3.0117,
"step": 526
},
{
"epoch": 4.75,
"learning_rate": 5.835835835835836e-06,
"loss": 3.084,
"step": 527
},
{
"epoch": 4.76,
"learning_rate": 5.825825825825826e-06,
"loss": 3.1289,
"step": 528
},
{
"epoch": 4.77,
"learning_rate": 5.815815815815816e-06,
"loss": 3.2168,
"step": 529
},
{
"epoch": 4.77,
"learning_rate": 5.805805805805806e-06,
"loss": 3.0781,
"step": 530
},
{
"epoch": 4.78,
"learning_rate": 5.7957957957957965e-06,
"loss": 3.375,
"step": 531
},
{
"epoch": 4.79,
"learning_rate": 5.785785785785786e-06,
"loss": 3.2207,
"step": 532
},
{
"epoch": 4.8,
"learning_rate": 5.775775775775776e-06,
"loss": 3.1152,
"step": 533
},
{
"epoch": 4.81,
"learning_rate": 5.765765765765766e-06,
"loss": 3.0605,
"step": 534
},
{
"epoch": 4.82,
"learning_rate": 5.755755755755756e-06,
"loss": 2.9414,
"step": 535
},
{
"epoch": 4.83,
"learning_rate": 5.7457457457457466e-06,
"loss": 3.1895,
"step": 536
},
{
"epoch": 4.84,
"learning_rate": 5.735735735735736e-06,
"loss": 3.1719,
"step": 537
},
{
"epoch": 4.85,
"learning_rate": 5.725725725725726e-06,
"loss": 3.2461,
"step": 538
},
{
"epoch": 4.86,
"learning_rate": 5.715715715715716e-06,
"loss": 3.082,
"step": 539
},
{
"epoch": 4.86,
"learning_rate": 5.7057057057057065e-06,
"loss": 3.0059,
"step": 540
},
{
"epoch": 4.87,
"learning_rate": 5.695695695695697e-06,
"loss": 3.0391,
"step": 541
},
{
"epoch": 4.88,
"learning_rate": 5.685685685685686e-06,
"loss": 3.2031,
"step": 542
},
{
"epoch": 4.89,
"learning_rate": 5.675675675675676e-06,
"loss": 3.1484,
"step": 543
},
{
"epoch": 4.9,
"learning_rate": 5.665665665665666e-06,
"loss": 2.7559,
"step": 544
},
{
"epoch": 4.91,
"learning_rate": 5.6556556556556565e-06,
"loss": 3.4102,
"step": 545
},
{
"epoch": 4.92,
"learning_rate": 5.645645645645647e-06,
"loss": 3.207,
"step": 546
},
{
"epoch": 4.93,
"learning_rate": 5.635635635635636e-06,
"loss": 3.3965,
"step": 547
},
{
"epoch": 4.94,
"learning_rate": 5.625625625625626e-06,
"loss": 3.0586,
"step": 548
},
{
"epoch": 4.95,
"learning_rate": 5.615615615615616e-06,
"loss": 3.0742,
"step": 549
},
{
"epoch": 4.95,
"learning_rate": 5.605605605605607e-06,
"loss": 3.2344,
"step": 550
},
{
"epoch": 4.96,
"learning_rate": 5.595595595595597e-06,
"loss": 2.9766,
"step": 551
},
{
"epoch": 4.97,
"learning_rate": 5.585585585585585e-06,
"loss": 3.2363,
"step": 552
},
{
"epoch": 4.98,
"learning_rate": 5.5755755755755755e-06,
"loss": 3.1465,
"step": 553
},
{
"epoch": 4.99,
"learning_rate": 5.565565565565566e-06,
"loss": 2.8984,
"step": 554
},
{
"epoch": 5.0,
"learning_rate": 5.555555555555557e-06,
"loss": 3.1914,
"step": 555
},
{
"epoch": 5.01,
"learning_rate": 5.545545545545547e-06,
"loss": 3.0293,
"step": 556
},
{
"epoch": 5.02,
"learning_rate": 5.535535535535535e-06,
"loss": 3.1074,
"step": 557
},
{
"epoch": 5.03,
"learning_rate": 5.5255255255255255e-06,
"loss": 3.1992,
"step": 558
},
{
"epoch": 5.04,
"learning_rate": 5.515515515515516e-06,
"loss": 3.2539,
"step": 559
},
{
"epoch": 5.05,
"learning_rate": 5.505505505505506e-06,
"loss": 2.9844,
"step": 560
},
{
"epoch": 5.05,
"learning_rate": 5.495495495495496e-06,
"loss": 3.0918,
"step": 561
},
{
"epoch": 5.06,
"learning_rate": 5.485485485485485e-06,
"loss": 3.209,
"step": 562
},
{
"epoch": 5.07,
"learning_rate": 5.475475475475476e-06,
"loss": 3.1289,
"step": 563
},
{
"epoch": 5.08,
"learning_rate": 5.465465465465466e-06,
"loss": 3.082,
"step": 564
},
{
"epoch": 5.09,
"learning_rate": 5.455455455455456e-06,
"loss": 3.1367,
"step": 565
},
{
"epoch": 5.1,
"learning_rate": 5.445445445445446e-06,
"loss": 3.1875,
"step": 566
},
{
"epoch": 5.11,
"learning_rate": 5.4354354354354355e-06,
"loss": 3.0781,
"step": 567
},
{
"epoch": 5.12,
"learning_rate": 5.425425425425426e-06,
"loss": 2.9648,
"step": 568
},
{
"epoch": 5.13,
"learning_rate": 5.415415415415416e-06,
"loss": 2.998,
"step": 569
},
{
"epoch": 5.14,
"learning_rate": 5.405405405405406e-06,
"loss": 2.9727,
"step": 570
},
{
"epoch": 5.14,
"learning_rate": 5.395395395395396e-06,
"loss": 3.2422,
"step": 571
},
{
"epoch": 5.15,
"learning_rate": 5.3853853853853856e-06,
"loss": 3.0996,
"step": 572
},
{
"epoch": 5.16,
"learning_rate": 5.375375375375376e-06,
"loss": 2.9785,
"step": 573
},
{
"epoch": 5.17,
"learning_rate": 5.365365365365366e-06,
"loss": 3.3477,
"step": 574
},
{
"epoch": 5.18,
"learning_rate": 5.355355355355356e-06,
"loss": 3.2539,
"step": 575
},
{
"epoch": 5.19,
"learning_rate": 5.345345345345346e-06,
"loss": 2.9785,
"step": 576
},
{
"epoch": 5.2,
"learning_rate": 5.335335335335336e-06,
"loss": 3.2148,
"step": 577
},
{
"epoch": 5.21,
"learning_rate": 5.325325325325326e-06,
"loss": 3.0742,
"step": 578
},
{
"epoch": 5.22,
"learning_rate": 5.315315315315316e-06,
"loss": 3.3828,
"step": 579
},
{
"epoch": 5.23,
"learning_rate": 5.305305305305306e-06,
"loss": 2.9688,
"step": 580
},
{
"epoch": 5.23,
"learning_rate": 5.2952952952952955e-06,
"loss": 3.1914,
"step": 581
},
{
"epoch": 5.24,
"learning_rate": 5.285285285285286e-06,
"loss": 3.3008,
"step": 582
},
{
"epoch": 5.25,
"learning_rate": 5.275275275275276e-06,
"loss": 3.2773,
"step": 583
},
{
"epoch": 5.26,
"learning_rate": 5.265265265265266e-06,
"loss": 3.0469,
"step": 584
},
{
"epoch": 5.27,
"learning_rate": 5.255255255255256e-06,
"loss": 3.416,
"step": 585
},
{
"epoch": 5.28,
"learning_rate": 5.245245245245245e-06,
"loss": 3.0254,
"step": 586
},
{
"epoch": 5.29,
"learning_rate": 5.235235235235236e-06,
"loss": 3.2871,
"step": 587
},
{
"epoch": 5.3,
"learning_rate": 5.225225225225226e-06,
"loss": 3.3242,
"step": 588
},
{
"epoch": 5.31,
"learning_rate": 5.215215215215216e-06,
"loss": 3.2383,
"step": 589
},
{
"epoch": 5.32,
"learning_rate": 5.205205205205206e-06,
"loss": 3.3457,
"step": 590
},
{
"epoch": 5.32,
"learning_rate": 5.195195195195195e-06,
"loss": 2.9941,
"step": 591
},
{
"epoch": 5.33,
"learning_rate": 5.185185185185185e-06,
"loss": 3.123,
"step": 592
},
{
"epoch": 5.34,
"learning_rate": 5.175175175175175e-06,
"loss": 3.25,
"step": 593
},
{
"epoch": 5.35,
"learning_rate": 5.165165165165165e-06,
"loss": 3.2656,
"step": 594
},
{
"epoch": 5.36,
"learning_rate": 5.155155155155156e-06,
"loss": 3.0078,
"step": 595
},
{
"epoch": 5.37,
"learning_rate": 5.145145145145145e-06,
"loss": 3.2129,
"step": 596
},
{
"epoch": 5.38,
"learning_rate": 5.135135135135135e-06,
"loss": 3.2637,
"step": 597
},
{
"epoch": 5.39,
"learning_rate": 5.125125125125125e-06,
"loss": 3.2637,
"step": 598
},
{
"epoch": 5.4,
"learning_rate": 5.115115115115115e-06,
"loss": 3.1914,
"step": 599
},
{
"epoch": 5.41,
"learning_rate": 5.105105105105106e-06,
"loss": 3.2129,
"step": 600
},
{
"epoch": 5.41,
"learning_rate": 5.095095095095095e-06,
"loss": 3.0098,
"step": 601
},
{
"epoch": 5.42,
"learning_rate": 5.085085085085085e-06,
"loss": 3.1523,
"step": 602
},
{
"epoch": 5.43,
"learning_rate": 5.075075075075075e-06,
"loss": 3.125,
"step": 603
},
{
"epoch": 5.44,
"learning_rate": 5.0650650650650655e-06,
"loss": 3.0938,
"step": 604
},
{
"epoch": 5.45,
"learning_rate": 5.055055055055056e-06,
"loss": 3.2129,
"step": 605
},
{
"epoch": 5.46,
"learning_rate": 5.045045045045045e-06,
"loss": 3.2598,
"step": 606
},
{
"epoch": 5.47,
"learning_rate": 5.035035035035035e-06,
"loss": 3.0547,
"step": 607
},
{
"epoch": 5.48,
"learning_rate": 5.025025025025025e-06,
"loss": 2.9492,
"step": 608
},
{
"epoch": 5.49,
"learning_rate": 5.0150150150150156e-06,
"loss": 3.2344,
"step": 609
},
{
"epoch": 5.5,
"learning_rate": 5.005005005005006e-06,
"loss": 3.4609,
"step": 610
},
{
"epoch": 5.5,
"learning_rate": 4.994994994994996e-06,
"loss": 3.0508,
"step": 611
},
{
"epoch": 5.51,
"learning_rate": 4.984984984984985e-06,
"loss": 3.2559,
"step": 612
},
{
"epoch": 5.52,
"learning_rate": 4.9749749749749754e-06,
"loss": 3.0,
"step": 613
},
{
"epoch": 5.53,
"learning_rate": 4.964964964964966e-06,
"loss": 3.2969,
"step": 614
},
{
"epoch": 5.54,
"learning_rate": 4.954954954954955e-06,
"loss": 3.1191,
"step": 615
},
{
"epoch": 5.55,
"learning_rate": 4.944944944944945e-06,
"loss": 3.0137,
"step": 616
},
{
"epoch": 5.56,
"learning_rate": 4.934934934934935e-06,
"loss": 3.0156,
"step": 617
},
{
"epoch": 5.57,
"learning_rate": 4.9249249249249255e-06,
"loss": 3.1777,
"step": 618
},
{
"epoch": 5.58,
"learning_rate": 4.914914914914916e-06,
"loss": 3.252,
"step": 619
},
{
"epoch": 5.59,
"learning_rate": 4.904904904904905e-06,
"loss": 3.0801,
"step": 620
},
{
"epoch": 5.59,
"learning_rate": 4.894894894894895e-06,
"loss": 3.1641,
"step": 621
},
{
"epoch": 5.6,
"learning_rate": 4.884884884884885e-06,
"loss": 2.9492,
"step": 622
},
{
"epoch": 5.61,
"learning_rate": 4.874874874874876e-06,
"loss": 3.1562,
"step": 623
},
{
"epoch": 5.62,
"learning_rate": 4.864864864864866e-06,
"loss": 3.0723,
"step": 624
},
{
"epoch": 5.63,
"learning_rate": 4.854854854854855e-06,
"loss": 3.0332,
"step": 625
},
{
"epoch": 5.64,
"learning_rate": 4.844844844844845e-06,
"loss": 3.1309,
"step": 626
},
{
"epoch": 5.65,
"learning_rate": 4.8348348348348355e-06,
"loss": 3.2773,
"step": 627
},
{
"epoch": 5.66,
"learning_rate": 4.824824824824826e-06,
"loss": 3.0117,
"step": 628
},
{
"epoch": 5.67,
"learning_rate": 4.814814814814815e-06,
"loss": 3.1211,
"step": 629
},
{
"epoch": 5.68,
"learning_rate": 4.804804804804805e-06,
"loss": 3.1348,
"step": 630
},
{
"epoch": 5.68,
"learning_rate": 4.794794794794795e-06,
"loss": 3.0234,
"step": 631
},
{
"epoch": 5.69,
"learning_rate": 4.784784784784785e-06,
"loss": 3.0586,
"step": 632
},
{
"epoch": 5.7,
"learning_rate": 4.774774774774775e-06,
"loss": 3.4473,
"step": 633
},
{
"epoch": 5.71,
"learning_rate": 4.764764764764765e-06,
"loss": 3.1641,
"step": 634
},
{
"epoch": 5.72,
"learning_rate": 4.754754754754755e-06,
"loss": 3.0078,
"step": 635
},
{
"epoch": 5.73,
"learning_rate": 4.7447447447447454e-06,
"loss": 3.0938,
"step": 636
},
{
"epoch": 5.74,
"learning_rate": 4.734734734734735e-06,
"loss": 3.0176,
"step": 637
},
{
"epoch": 5.75,
"learning_rate": 4.724724724724725e-06,
"loss": 3.1602,
"step": 638
},
{
"epoch": 5.76,
"learning_rate": 4.714714714714715e-06,
"loss": 3.0527,
"step": 639
},
{
"epoch": 5.77,
"learning_rate": 4.704704704704705e-06,
"loss": 3.0117,
"step": 640
},
{
"epoch": 5.77,
"learning_rate": 4.6946946946946955e-06,
"loss": 2.873,
"step": 641
},
{
"epoch": 5.78,
"learning_rate": 4.684684684684685e-06,
"loss": 2.8789,
"step": 642
},
{
"epoch": 5.79,
"learning_rate": 4.674674674674675e-06,
"loss": 2.9902,
"step": 643
},
{
"epoch": 5.8,
"learning_rate": 4.664664664664665e-06,
"loss": 3.2559,
"step": 644
},
{
"epoch": 5.81,
"learning_rate": 4.654654654654655e-06,
"loss": 3.1055,
"step": 645
},
{
"epoch": 5.82,
"learning_rate": 4.6446446446446456e-06,
"loss": 2.8809,
"step": 646
},
{
"epoch": 5.83,
"learning_rate": 4.634634634634635e-06,
"loss": 2.8516,
"step": 647
},
{
"epoch": 5.84,
"learning_rate": 4.624624624624625e-06,
"loss": 3.1094,
"step": 648
},
{
"epoch": 5.85,
"learning_rate": 4.614614614614614e-06,
"loss": 3.1641,
"step": 649
},
{
"epoch": 5.86,
"learning_rate": 4.604604604604605e-06,
"loss": 3.0547,
"step": 650
},
{
"epoch": 5.86,
"learning_rate": 4.594594594594596e-06,
"loss": 3.125,
"step": 651
},
{
"epoch": 5.87,
"learning_rate": 4.584584584584585e-06,
"loss": 3.252,
"step": 652
},
{
"epoch": 5.88,
"learning_rate": 4.574574574574575e-06,
"loss": 3.1758,
"step": 653
},
{
"epoch": 5.89,
"learning_rate": 4.5645645645645645e-06,
"loss": 3.1387,
"step": 654
},
{
"epoch": 5.9,
"learning_rate": 4.554554554554555e-06,
"loss": 3.4688,
"step": 655
},
{
"epoch": 5.91,
"learning_rate": 4.544544544544545e-06,
"loss": 3.1855,
"step": 656
},
{
"epoch": 5.92,
"learning_rate": 4.534534534534535e-06,
"loss": 3.2285,
"step": 657
},
{
"epoch": 5.93,
"learning_rate": 4.524524524524525e-06,
"loss": 3.3047,
"step": 658
},
{
"epoch": 5.94,
"learning_rate": 4.5145145145145146e-06,
"loss": 3.252,
"step": 659
},
{
"epoch": 5.95,
"learning_rate": 4.504504504504505e-06,
"loss": 3.3457,
"step": 660
},
{
"epoch": 5.95,
"learning_rate": 4.494494494494495e-06,
"loss": 3.1406,
"step": 661
},
{
"epoch": 5.96,
"learning_rate": 4.484484484484485e-06,
"loss": 2.959,
"step": 662
},
{
"epoch": 5.97,
"learning_rate": 4.474474474474475e-06,
"loss": 3.2773,
"step": 663
},
{
"epoch": 5.98,
"learning_rate": 4.464464464464465e-06,
"loss": 3.0312,
"step": 664
},
{
"epoch": 5.99,
"learning_rate": 4.454454454454455e-06,
"loss": 3.043,
"step": 665
},
{
"epoch": 6.0,
"learning_rate": 4.444444444444444e-06,
"loss": 3.207,
"step": 666
},
{
"epoch": 6.01,
"learning_rate": 4.434434434434435e-06,
"loss": 3.1582,
"step": 667
},
{
"epoch": 6.02,
"learning_rate": 4.424424424424425e-06,
"loss": 3.2383,
"step": 668
},
{
"epoch": 6.03,
"learning_rate": 4.414414414414415e-06,
"loss": 3.0195,
"step": 669
},
{
"epoch": 6.04,
"learning_rate": 4.404404404404405e-06,
"loss": 3.0371,
"step": 670
},
{
"epoch": 6.05,
"learning_rate": 4.394394394394394e-06,
"loss": 2.8926,
"step": 671
},
{
"epoch": 6.05,
"learning_rate": 4.384384384384384e-06,
"loss": 3.2598,
"step": 672
},
{
"epoch": 6.06,
"learning_rate": 4.374374374374375e-06,
"loss": 3.2812,
"step": 673
},
{
"epoch": 6.07,
"learning_rate": 4.364364364364365e-06,
"loss": 3.0664,
"step": 674
},
{
"epoch": 6.08,
"learning_rate": 4.354354354354355e-06,
"loss": 3.043,
"step": 675
},
{
"epoch": 6.09,
"learning_rate": 4.344344344344344e-06,
"loss": 3.0078,
"step": 676
},
{
"epoch": 6.1,
"learning_rate": 4.3343343343343345e-06,
"loss": 3.1133,
"step": 677
},
{
"epoch": 6.11,
"learning_rate": 4.324324324324325e-06,
"loss": 3.1055,
"step": 678
},
{
"epoch": 6.12,
"learning_rate": 4.314314314314315e-06,
"loss": 3.3027,
"step": 679
},
{
"epoch": 6.13,
"learning_rate": 4.304304304304305e-06,
"loss": 3.1328,
"step": 680
},
{
"epoch": 6.14,
"learning_rate": 4.294294294294294e-06,
"loss": 3.1602,
"step": 681
},
{
"epoch": 6.14,
"learning_rate": 4.2842842842842845e-06,
"loss": 3.0918,
"step": 682
},
{
"epoch": 6.15,
"learning_rate": 4.274274274274275e-06,
"loss": 3.1914,
"step": 683
},
{
"epoch": 6.16,
"learning_rate": 4.264264264264265e-06,
"loss": 3.0449,
"step": 684
},
{
"epoch": 6.17,
"learning_rate": 4.254254254254255e-06,
"loss": 3.3457,
"step": 685
},
{
"epoch": 6.18,
"learning_rate": 4.2442442442442444e-06,
"loss": 3.1387,
"step": 686
},
{
"epoch": 6.19,
"learning_rate": 4.234234234234235e-06,
"loss": 2.9785,
"step": 687
},
{
"epoch": 6.2,
"learning_rate": 4.224224224224225e-06,
"loss": 2.7988,
"step": 688
},
{
"epoch": 6.21,
"learning_rate": 4.214214214214214e-06,
"loss": 3.2715,
"step": 689
},
{
"epoch": 6.22,
"learning_rate": 4.204204204204204e-06,
"loss": 3.2773,
"step": 690
},
{
"epoch": 6.23,
"learning_rate": 4.1941941941941945e-06,
"loss": 3.2461,
"step": 691
},
{
"epoch": 6.23,
"learning_rate": 4.184184184184185e-06,
"loss": 3.0215,
"step": 692
},
{
"epoch": 6.24,
"learning_rate": 4.174174174174174e-06,
"loss": 3.1465,
"step": 693
},
{
"epoch": 6.25,
"learning_rate": 4.164164164164164e-06,
"loss": 2.875,
"step": 694
},
{
"epoch": 6.26,
"learning_rate": 4.154154154154154e-06,
"loss": 3.0781,
"step": 695
},
{
"epoch": 6.27,
"learning_rate": 4.1441441441441446e-06,
"loss": 3.2168,
"step": 696
},
{
"epoch": 6.28,
"learning_rate": 4.134134134134135e-06,
"loss": 3.0215,
"step": 697
},
{
"epoch": 6.29,
"learning_rate": 4.124124124124124e-06,
"loss": 3.1504,
"step": 698
},
{
"epoch": 6.3,
"learning_rate": 4.114114114114114e-06,
"loss": 3.3789,
"step": 699
},
{
"epoch": 6.31,
"learning_rate": 4.1041041041041045e-06,
"loss": 2.7676,
"step": 700
},
{
"epoch": 6.32,
"learning_rate": 4.094094094094095e-06,
"loss": 2.7422,
"step": 701
},
{
"epoch": 6.32,
"learning_rate": 4.084084084084085e-06,
"loss": 2.9551,
"step": 702
},
{
"epoch": 6.33,
"learning_rate": 4.074074074074074e-06,
"loss": 3.2578,
"step": 703
},
{
"epoch": 6.34,
"learning_rate": 4.064064064064064e-06,
"loss": 2.9922,
"step": 704
},
{
"epoch": 6.35,
"learning_rate": 4.0540540540540545e-06,
"loss": 3.3066,
"step": 705
},
{
"epoch": 6.36,
"learning_rate": 4.044044044044044e-06,
"loss": 3.252,
"step": 706
},
{
"epoch": 6.37,
"learning_rate": 4.034034034034035e-06,
"loss": 2.8281,
"step": 707
},
{
"epoch": 6.38,
"learning_rate": 4.024024024024024e-06,
"loss": 3.1113,
"step": 708
},
{
"epoch": 6.39,
"learning_rate": 4.014014014014014e-06,
"loss": 3.2637,
"step": 709
},
{
"epoch": 6.4,
"learning_rate": 4.004004004004005e-06,
"loss": 3.1914,
"step": 710
},
{
"epoch": 6.41,
"learning_rate": 3.993993993993994e-06,
"loss": 3.2207,
"step": 711
},
{
"epoch": 6.41,
"learning_rate": 3.983983983983984e-06,
"loss": 3.1914,
"step": 712
},
{
"epoch": 6.42,
"learning_rate": 3.973973973973974e-06,
"loss": 3.2246,
"step": 713
},
{
"epoch": 6.43,
"learning_rate": 3.9639639639639645e-06,
"loss": 2.9844,
"step": 714
},
{
"epoch": 6.44,
"learning_rate": 3.953953953953955e-06,
"loss": 3.2051,
"step": 715
},
{
"epoch": 6.45,
"learning_rate": 3.943943943943944e-06,
"loss": 3.1133,
"step": 716
},
{
"epoch": 6.46,
"learning_rate": 3.933933933933934e-06,
"loss": 2.9531,
"step": 717
},
{
"epoch": 6.47,
"learning_rate": 3.923923923923924e-06,
"loss": 3.334,
"step": 718
},
{
"epoch": 6.48,
"learning_rate": 3.9139139139139145e-06,
"loss": 3.1523,
"step": 719
},
{
"epoch": 6.49,
"learning_rate": 3.903903903903904e-06,
"loss": 3.1523,
"step": 720
},
{
"epoch": 6.5,
"learning_rate": 3.893893893893894e-06,
"loss": 3.2148,
"step": 721
},
{
"epoch": 6.5,
"learning_rate": 3.883883883883884e-06,
"loss": 2.9434,
"step": 722
},
{
"epoch": 6.51,
"learning_rate": 3.8738738738738744e-06,
"loss": 2.9453,
"step": 723
},
{
"epoch": 6.52,
"learning_rate": 3.863863863863865e-06,
"loss": 3.2461,
"step": 724
},
{
"epoch": 6.53,
"learning_rate": 3.853853853853854e-06,
"loss": 3.1387,
"step": 725
},
{
"epoch": 6.54,
"learning_rate": 3.843843843843844e-06,
"loss": 3.248,
"step": 726
},
{
"epoch": 6.55,
"learning_rate": 3.833833833833834e-06,
"loss": 3.2871,
"step": 727
},
{
"epoch": 6.56,
"learning_rate": 3.823823823823824e-06,
"loss": 3.1387,
"step": 728
},
{
"epoch": 6.57,
"learning_rate": 3.8138138138138143e-06,
"loss": 3.1426,
"step": 729
},
{
"epoch": 6.58,
"learning_rate": 3.803803803803804e-06,
"loss": 3.0508,
"step": 730
},
{
"epoch": 6.59,
"learning_rate": 3.793793793793794e-06,
"loss": 3.4473,
"step": 731
},
{
"epoch": 6.59,
"learning_rate": 3.7837837837837844e-06,
"loss": 3.1621,
"step": 732
},
{
"epoch": 6.6,
"learning_rate": 3.773773773773774e-06,
"loss": 3.2617,
"step": 733
},
{
"epoch": 6.61,
"learning_rate": 3.7637637637637643e-06,
"loss": 3.0703,
"step": 734
},
{
"epoch": 6.62,
"learning_rate": 3.7537537537537537e-06,
"loss": 3.2598,
"step": 735
},
{
"epoch": 6.63,
"learning_rate": 3.743743743743744e-06,
"loss": 2.9609,
"step": 736
},
{
"epoch": 6.64,
"learning_rate": 3.7337337337337345e-06,
"loss": 3.3223,
"step": 737
},
{
"epoch": 6.65,
"learning_rate": 3.723723723723724e-06,
"loss": 3.2188,
"step": 738
},
{
"epoch": 6.66,
"learning_rate": 3.713713713713714e-06,
"loss": 3.1406,
"step": 739
},
{
"epoch": 6.67,
"learning_rate": 3.7037037037037037e-06,
"loss": 3.0312,
"step": 740
},
{
"epoch": 6.68,
"learning_rate": 3.693693693693694e-06,
"loss": 3.0488,
"step": 741
},
{
"epoch": 6.68,
"learning_rate": 3.683683683683684e-06,
"loss": 3.0977,
"step": 742
},
{
"epoch": 6.69,
"learning_rate": 3.673673673673674e-06,
"loss": 3.1289,
"step": 743
},
{
"epoch": 6.7,
"learning_rate": 3.663663663663664e-06,
"loss": 3.0645,
"step": 744
},
{
"epoch": 6.71,
"learning_rate": 3.653653653653654e-06,
"loss": 3.1211,
"step": 745
},
{
"epoch": 6.72,
"learning_rate": 3.643643643643644e-06,
"loss": 3.4707,
"step": 746
},
{
"epoch": 6.73,
"learning_rate": 3.633633633633634e-06,
"loss": 3.0059,
"step": 747
},
{
"epoch": 6.74,
"learning_rate": 3.623623623623624e-06,
"loss": 3.0449,
"step": 748
},
{
"epoch": 6.75,
"learning_rate": 3.613613613613614e-06,
"loss": 3.2871,
"step": 749
},
{
"epoch": 6.76,
"learning_rate": 3.603603603603604e-06,
"loss": 2.8672,
"step": 750
},
{
"epoch": 6.77,
"learning_rate": 3.593593593593594e-06,
"loss": 3.1699,
"step": 751
},
{
"epoch": 6.77,
"learning_rate": 3.5835835835835834e-06,
"loss": 3.0449,
"step": 752
},
{
"epoch": 6.78,
"learning_rate": 3.573573573573574e-06,
"loss": 2.8223,
"step": 753
},
{
"epoch": 6.79,
"learning_rate": 3.563563563563564e-06,
"loss": 3.2969,
"step": 754
},
{
"epoch": 6.8,
"learning_rate": 3.5535535535535535e-06,
"loss": 3.4629,
"step": 755
},
{
"epoch": 6.81,
"learning_rate": 3.5435435435435437e-06,
"loss": 3.0605,
"step": 756
},
{
"epoch": 6.82,
"learning_rate": 3.5335335335335335e-06,
"loss": 3.2539,
"step": 757
},
{
"epoch": 6.83,
"learning_rate": 3.5235235235235237e-06,
"loss": 3.082,
"step": 758
},
{
"epoch": 6.84,
"learning_rate": 3.513513513513514e-06,
"loss": 2.9375,
"step": 759
},
{
"epoch": 6.85,
"learning_rate": 3.5035035035035036e-06,
"loss": 3.0762,
"step": 760
},
{
"epoch": 6.86,
"learning_rate": 3.4934934934934938e-06,
"loss": 3.3047,
"step": 761
},
{
"epoch": 6.86,
"learning_rate": 3.4834834834834835e-06,
"loss": 3.2051,
"step": 762
},
{
"epoch": 6.87,
"learning_rate": 3.4734734734734737e-06,
"loss": 3.2773,
"step": 763
},
{
"epoch": 6.88,
"learning_rate": 3.463463463463464e-06,
"loss": 3.2422,
"step": 764
},
{
"epoch": 6.89,
"learning_rate": 3.4534534534534537e-06,
"loss": 3.4688,
"step": 765
},
{
"epoch": 6.9,
"learning_rate": 3.443443443443444e-06,
"loss": 3.3223,
"step": 766
},
{
"epoch": 6.91,
"learning_rate": 3.4334334334334336e-06,
"loss": 3.1406,
"step": 767
},
{
"epoch": 6.92,
"learning_rate": 3.423423423423424e-06,
"loss": 3.2773,
"step": 768
},
{
"epoch": 6.93,
"learning_rate": 3.413413413413414e-06,
"loss": 3.0469,
"step": 769
},
{
"epoch": 6.94,
"learning_rate": 3.4034034034034037e-06,
"loss": 2.9121,
"step": 770
},
{
"epoch": 6.95,
"learning_rate": 3.393393393393394e-06,
"loss": 3.2695,
"step": 771
},
{
"epoch": 6.95,
"learning_rate": 3.3833833833833833e-06,
"loss": 3.1523,
"step": 772
},
{
"epoch": 6.96,
"learning_rate": 3.373373373373374e-06,
"loss": 3.0957,
"step": 773
},
{
"epoch": 6.97,
"learning_rate": 3.363363363363364e-06,
"loss": 3.2031,
"step": 774
},
{
"epoch": 6.98,
"learning_rate": 3.3533533533533534e-06,
"loss": 3.1406,
"step": 775
},
{
"epoch": 6.99,
"learning_rate": 3.3433433433433436e-06,
"loss": 3.1426,
"step": 776
},
{
"epoch": 7.0,
"learning_rate": 3.3333333333333333e-06,
"loss": 3.1152,
"step": 777
},
{
"epoch": 7.01,
"learning_rate": 3.3233233233233235e-06,
"loss": 3.25,
"step": 778
},
{
"epoch": 7.02,
"learning_rate": 3.3133133133133137e-06,
"loss": 3.1387,
"step": 779
},
{
"epoch": 7.03,
"learning_rate": 3.3033033033033035e-06,
"loss": 3.1641,
"step": 780
},
{
"epoch": 7.04,
"learning_rate": 3.2932932932932936e-06,
"loss": 3.1035,
"step": 781
},
{
"epoch": 7.05,
"learning_rate": 3.2832832832832834e-06,
"loss": 3.2188,
"step": 782
},
{
"epoch": 7.05,
"learning_rate": 3.2732732732732736e-06,
"loss": 3.3848,
"step": 783
},
{
"epoch": 7.06,
"learning_rate": 3.2632632632632633e-06,
"loss": 3.2305,
"step": 784
},
{
"epoch": 7.07,
"learning_rate": 3.2532532532532535e-06,
"loss": 3.0684,
"step": 785
},
{
"epoch": 7.08,
"learning_rate": 3.2432432432432437e-06,
"loss": 3.1797,
"step": 786
},
{
"epoch": 7.09,
"learning_rate": 3.2332332332332335e-06,
"loss": 3.1016,
"step": 787
},
{
"epoch": 7.1,
"learning_rate": 3.2232232232232236e-06,
"loss": 3.1484,
"step": 788
},
{
"epoch": 7.11,
"learning_rate": 3.2132132132132134e-06,
"loss": 3.1895,
"step": 789
},
{
"epoch": 7.12,
"learning_rate": 3.2032032032032036e-06,
"loss": 3.1758,
"step": 790
},
{
"epoch": 7.13,
"learning_rate": 3.1931931931931938e-06,
"loss": 3.2227,
"step": 791
},
{
"epoch": 7.14,
"learning_rate": 3.183183183183183e-06,
"loss": 3.0449,
"step": 792
},
{
"epoch": 7.14,
"learning_rate": 3.1731731731731737e-06,
"loss": 3.0664,
"step": 793
},
{
"epoch": 7.15,
"learning_rate": 3.163163163163163e-06,
"loss": 3.1914,
"step": 794
},
{
"epoch": 7.16,
"learning_rate": 3.1531531531531532e-06,
"loss": 3.0176,
"step": 795
},
{
"epoch": 7.17,
"learning_rate": 3.1431431431431434e-06,
"loss": 3.0859,
"step": 796
},
{
"epoch": 7.18,
"learning_rate": 3.133133133133133e-06,
"loss": 2.9922,
"step": 797
},
{
"epoch": 7.19,
"learning_rate": 3.1231231231231234e-06,
"loss": 3.0625,
"step": 798
},
{
"epoch": 7.2,
"learning_rate": 3.113113113113113e-06,
"loss": 3.1934,
"step": 799
},
{
"epoch": 7.21,
"learning_rate": 3.1031031031031033e-06,
"loss": 3.248,
"step": 800
},
{
"epoch": 7.22,
"learning_rate": 3.0930930930930935e-06,
"loss": 3.0469,
"step": 801
},
{
"epoch": 7.23,
"learning_rate": 3.0830830830830832e-06,
"loss": 3.2539,
"step": 802
},
{
"epoch": 7.23,
"learning_rate": 3.0730730730730734e-06,
"loss": 3.0957,
"step": 803
},
{
"epoch": 7.24,
"learning_rate": 3.063063063063063e-06,
"loss": 3.1367,
"step": 804
},
{
"epoch": 7.25,
"learning_rate": 3.0530530530530534e-06,
"loss": 2.9668,
"step": 805
},
{
"epoch": 7.26,
"learning_rate": 3.0430430430430436e-06,
"loss": 3.4141,
"step": 806
},
{
"epoch": 7.27,
"learning_rate": 3.0330330330330333e-06,
"loss": 3.3301,
"step": 807
},
{
"epoch": 7.28,
"learning_rate": 3.0230230230230235e-06,
"loss": 3.0293,
"step": 808
},
{
"epoch": 7.29,
"learning_rate": 3.0130130130130133e-06,
"loss": 3.2227,
"step": 809
},
{
"epoch": 7.3,
"learning_rate": 3.0030030030030034e-06,
"loss": 2.7285,
"step": 810
},
{
"epoch": 7.31,
"learning_rate": 2.9929929929929936e-06,
"loss": 3.1836,
"step": 811
},
{
"epoch": 7.32,
"learning_rate": 2.982982982982983e-06,
"loss": 3.0469,
"step": 812
},
{
"epoch": 7.32,
"learning_rate": 2.9729729729729736e-06,
"loss": 2.9297,
"step": 813
},
{
"epoch": 7.33,
"learning_rate": 2.962962962962963e-06,
"loss": 2.832,
"step": 814
},
{
"epoch": 7.34,
"learning_rate": 2.952952952952953e-06,
"loss": 3.0625,
"step": 815
},
{
"epoch": 7.35,
"learning_rate": 2.942942942942943e-06,
"loss": 3.0234,
"step": 816
},
{
"epoch": 7.36,
"learning_rate": 2.932932932932933e-06,
"loss": 3.2051,
"step": 817
},
{
"epoch": 7.37,
"learning_rate": 2.9229229229229232e-06,
"loss": 3.1309,
"step": 818
},
{
"epoch": 7.38,
"learning_rate": 2.912912912912913e-06,
"loss": 2.8984,
"step": 819
},
{
"epoch": 7.39,
"learning_rate": 2.902902902902903e-06,
"loss": 2.8047,
"step": 820
},
{
"epoch": 7.4,
"learning_rate": 2.892892892892893e-06,
"loss": 3.1289,
"step": 821
},
{
"epoch": 7.41,
"learning_rate": 2.882882882882883e-06,
"loss": 3.0801,
"step": 822
},
{
"epoch": 7.41,
"learning_rate": 2.8728728728728733e-06,
"loss": 3.1445,
"step": 823
},
{
"epoch": 7.42,
"learning_rate": 2.862862862862863e-06,
"loss": 3.2539,
"step": 824
},
{
"epoch": 7.43,
"learning_rate": 2.8528528528528532e-06,
"loss": 3.334,
"step": 825
},
{
"epoch": 7.44,
"learning_rate": 2.842842842842843e-06,
"loss": 3.1367,
"step": 826
},
{
"epoch": 7.45,
"learning_rate": 2.832832832832833e-06,
"loss": 3.123,
"step": 827
},
{
"epoch": 7.46,
"learning_rate": 2.8228228228228234e-06,
"loss": 3.2031,
"step": 828
},
{
"epoch": 7.47,
"learning_rate": 2.812812812812813e-06,
"loss": 2.9961,
"step": 829
},
{
"epoch": 7.48,
"learning_rate": 2.8028028028028033e-06,
"loss": 2.9336,
"step": 830
},
{
"epoch": 7.49,
"learning_rate": 2.7927927927927926e-06,
"loss": 2.9902,
"step": 831
},
{
"epoch": 7.5,
"learning_rate": 2.782782782782783e-06,
"loss": 3.2695,
"step": 832
},
{
"epoch": 7.5,
"learning_rate": 2.7727727727727734e-06,
"loss": 3.3906,
"step": 833
},
{
"epoch": 7.51,
"learning_rate": 2.7627627627627628e-06,
"loss": 3.3496,
"step": 834
},
{
"epoch": 7.52,
"learning_rate": 2.752752752752753e-06,
"loss": 2.9102,
"step": 835
},
{
"epoch": 7.53,
"learning_rate": 2.7427427427427427e-06,
"loss": 3.0898,
"step": 836
},
{
"epoch": 7.54,
"learning_rate": 2.732732732732733e-06,
"loss": 3.2207,
"step": 837
},
{
"epoch": 7.55,
"learning_rate": 2.722722722722723e-06,
"loss": 3.3047,
"step": 838
},
{
"epoch": 7.56,
"learning_rate": 2.712712712712713e-06,
"loss": 2.8613,
"step": 839
},
{
"epoch": 7.57,
"learning_rate": 2.702702702702703e-06,
"loss": 3.2227,
"step": 840
},
{
"epoch": 7.58,
"learning_rate": 2.6926926926926928e-06,
"loss": 3.1914,
"step": 841
},
{
"epoch": 7.59,
"learning_rate": 2.682682682682683e-06,
"loss": 3.0918,
"step": 842
},
{
"epoch": 7.59,
"learning_rate": 2.672672672672673e-06,
"loss": 3.0059,
"step": 843
},
{
"epoch": 7.6,
"learning_rate": 2.662662662662663e-06,
"loss": 2.9746,
"step": 844
},
{
"epoch": 7.61,
"learning_rate": 2.652652652652653e-06,
"loss": 3.1934,
"step": 845
},
{
"epoch": 7.62,
"learning_rate": 2.642642642642643e-06,
"loss": 2.9531,
"step": 846
},
{
"epoch": 7.63,
"learning_rate": 2.632632632632633e-06,
"loss": 3.3906,
"step": 847
},
{
"epoch": 7.64,
"learning_rate": 2.6226226226226224e-06,
"loss": 3.2422,
"step": 848
},
{
"epoch": 7.65,
"learning_rate": 2.612612612612613e-06,
"loss": 2.9336,
"step": 849
},
{
"epoch": 7.66,
"learning_rate": 2.602602602602603e-06,
"loss": 3.0273,
"step": 850
},
{
"epoch": 7.67,
"learning_rate": 2.5925925925925925e-06,
"loss": 3.252,
"step": 851
},
{
"epoch": 7.68,
"learning_rate": 2.5825825825825827e-06,
"loss": 3.2461,
"step": 852
},
{
"epoch": 7.68,
"learning_rate": 2.5725725725725724e-06,
"loss": 2.9727,
"step": 853
},
{
"epoch": 7.69,
"learning_rate": 2.5625625625625626e-06,
"loss": 3.0586,
"step": 854
},
{
"epoch": 7.7,
"learning_rate": 2.552552552552553e-06,
"loss": 3.1895,
"step": 855
},
{
"epoch": 7.71,
"learning_rate": 2.5425425425425426e-06,
"loss": 3.0,
"step": 856
},
{
"epoch": 7.72,
"learning_rate": 2.5325325325325327e-06,
"loss": 3.0547,
"step": 857
},
{
"epoch": 7.73,
"learning_rate": 2.5225225225225225e-06,
"loss": 2.9512,
"step": 858
},
{
"epoch": 7.74,
"learning_rate": 2.5125125125125127e-06,
"loss": 3.3359,
"step": 859
},
{
"epoch": 7.75,
"learning_rate": 2.502502502502503e-06,
"loss": 3.3691,
"step": 860
},
{
"epoch": 7.76,
"learning_rate": 2.4924924924924926e-06,
"loss": 3.0547,
"step": 861
},
{
"epoch": 7.77,
"learning_rate": 2.482482482482483e-06,
"loss": 3.0781,
"step": 862
},
{
"epoch": 7.77,
"learning_rate": 2.4724724724724726e-06,
"loss": 3.0703,
"step": 863
},
{
"epoch": 7.78,
"learning_rate": 2.4624624624624628e-06,
"loss": 3.4199,
"step": 864
},
{
"epoch": 7.79,
"learning_rate": 2.4524524524524525e-06,
"loss": 2.8555,
"step": 865
},
{
"epoch": 7.8,
"learning_rate": 2.4424424424424427e-06,
"loss": 3.0898,
"step": 866
},
{
"epoch": 7.81,
"learning_rate": 2.432432432432433e-06,
"loss": 3.0234,
"step": 867
},
{
"epoch": 7.82,
"learning_rate": 2.4224224224224226e-06,
"loss": 3.1465,
"step": 868
},
{
"epoch": 7.83,
"learning_rate": 2.412412412412413e-06,
"loss": 3.1348,
"step": 869
},
{
"epoch": 7.84,
"learning_rate": 2.4024024024024026e-06,
"loss": 3.0078,
"step": 870
},
{
"epoch": 7.85,
"learning_rate": 2.3923923923923923e-06,
"loss": 2.8633,
"step": 871
},
{
"epoch": 7.86,
"learning_rate": 2.3823823823823825e-06,
"loss": 2.9688,
"step": 872
},
{
"epoch": 7.86,
"learning_rate": 2.3723723723723727e-06,
"loss": 2.7852,
"step": 873
},
{
"epoch": 7.87,
"learning_rate": 2.3623623623623625e-06,
"loss": 3.0371,
"step": 874
},
{
"epoch": 7.88,
"learning_rate": 2.3523523523523527e-06,
"loss": 3.2422,
"step": 875
},
{
"epoch": 7.89,
"learning_rate": 2.3423423423423424e-06,
"loss": 2.9746,
"step": 876
},
{
"epoch": 7.9,
"learning_rate": 2.3323323323323326e-06,
"loss": 2.9355,
"step": 877
},
{
"epoch": 7.91,
"learning_rate": 2.3223223223223228e-06,
"loss": 2.9688,
"step": 878
},
{
"epoch": 7.92,
"learning_rate": 2.3123123123123125e-06,
"loss": 2.6152,
"step": 879
},
{
"epoch": 7.93,
"learning_rate": 2.3023023023023023e-06,
"loss": 2.8555,
"step": 880
},
{
"epoch": 7.94,
"learning_rate": 2.2922922922922925e-06,
"loss": 3.0488,
"step": 881
},
{
"epoch": 7.95,
"learning_rate": 2.2822822822822822e-06,
"loss": 3.2402,
"step": 882
},
{
"epoch": 7.95,
"learning_rate": 2.2722722722722724e-06,
"loss": 3.1094,
"step": 883
},
{
"epoch": 7.96,
"learning_rate": 2.2622622622622626e-06,
"loss": 3.1758,
"step": 884
},
{
"epoch": 7.97,
"learning_rate": 2.2522522522522524e-06,
"loss": 3.1016,
"step": 885
},
{
"epoch": 7.98,
"learning_rate": 2.2422422422422426e-06,
"loss": 3.3242,
"step": 886
},
{
"epoch": 7.99,
"learning_rate": 2.2322322322322323e-06,
"loss": 3.0566,
"step": 887
},
{
"epoch": 8.0,
"learning_rate": 2.222222222222222e-06,
"loss": 3.0996,
"step": 888
},
{
"epoch": 8.01,
"learning_rate": 2.2122122122122127e-06,
"loss": 3.2637,
"step": 889
},
{
"epoch": 8.02,
"learning_rate": 2.2022022022022024e-06,
"loss": 2.7852,
"step": 890
},
{
"epoch": 8.03,
"learning_rate": 2.192192192192192e-06,
"loss": 3.0605,
"step": 891
},
{
"epoch": 8.04,
"learning_rate": 2.1821821821821824e-06,
"loss": 2.9648,
"step": 892
},
{
"epoch": 8.05,
"learning_rate": 2.172172172172172e-06,
"loss": 3.1934,
"step": 893
},
{
"epoch": 8.05,
"learning_rate": 2.1621621621621623e-06,
"loss": 2.5977,
"step": 894
},
{
"epoch": 8.06,
"learning_rate": 2.1521521521521525e-06,
"loss": 3.3105,
"step": 895
},
{
"epoch": 8.07,
"learning_rate": 2.1421421421421423e-06,
"loss": 3.0215,
"step": 896
},
{
"epoch": 8.08,
"learning_rate": 2.1321321321321325e-06,
"loss": 3.0879,
"step": 897
},
{
"epoch": 8.09,
"learning_rate": 2.1221221221221222e-06,
"loss": 3.3477,
"step": 898
},
{
"epoch": 8.1,
"learning_rate": 2.1121121121121124e-06,
"loss": 3.0391,
"step": 899
},
{
"epoch": 8.11,
"learning_rate": 2.102102102102102e-06,
"loss": 3.3457,
"step": 900
},
{
"epoch": 8.12,
"learning_rate": 2.0920920920920923e-06,
"loss": 3.2051,
"step": 901
},
{
"epoch": 8.13,
"learning_rate": 2.082082082082082e-06,
"loss": 3.0977,
"step": 902
},
{
"epoch": 8.14,
"learning_rate": 2.0720720720720723e-06,
"loss": 3.0762,
"step": 903
},
{
"epoch": 8.14,
"learning_rate": 2.062062062062062e-06,
"loss": 3.1738,
"step": 904
},
{
"epoch": 8.15,
"learning_rate": 2.0520520520520522e-06,
"loss": 3.082,
"step": 905
},
{
"epoch": 8.16,
"learning_rate": 2.0420420420420424e-06,
"loss": 3.2695,
"step": 906
},
{
"epoch": 8.17,
"learning_rate": 2.032032032032032e-06,
"loss": 3.1074,
"step": 907
},
{
"epoch": 8.18,
"learning_rate": 2.022022022022022e-06,
"loss": 2.9785,
"step": 908
},
{
"epoch": 8.19,
"learning_rate": 2.012012012012012e-06,
"loss": 2.9746,
"step": 909
},
{
"epoch": 8.2,
"learning_rate": 2.0020020020020023e-06,
"loss": 3.0391,
"step": 910
},
{
"epoch": 8.21,
"learning_rate": 1.991991991991992e-06,
"loss": 3.1855,
"step": 911
},
{
"epoch": 8.22,
"learning_rate": 1.9819819819819822e-06,
"loss": 3.0234,
"step": 912
},
{
"epoch": 8.23,
"learning_rate": 1.971971971971972e-06,
"loss": 3.3262,
"step": 913
},
{
"epoch": 8.23,
"learning_rate": 1.961961961961962e-06,
"loss": 3.1406,
"step": 914
},
{
"epoch": 8.24,
"learning_rate": 1.951951951951952e-06,
"loss": 3.0703,
"step": 915
},
{
"epoch": 8.25,
"learning_rate": 1.941941941941942e-06,
"loss": 2.9336,
"step": 916
},
{
"epoch": 8.26,
"learning_rate": 1.9319319319319323e-06,
"loss": 3.1562,
"step": 917
},
{
"epoch": 8.27,
"learning_rate": 1.921921921921922e-06,
"loss": 3.0879,
"step": 918
},
{
"epoch": 8.28,
"learning_rate": 1.911911911911912e-06,
"loss": 3.1445,
"step": 919
},
{
"epoch": 8.29,
"learning_rate": 1.901901901901902e-06,
"loss": 2.8867,
"step": 920
},
{
"epoch": 8.3,
"learning_rate": 1.8918918918918922e-06,
"loss": 3.0293,
"step": 921
},
{
"epoch": 8.31,
"learning_rate": 1.8818818818818822e-06,
"loss": 2.9102,
"step": 922
},
{
"epoch": 8.32,
"learning_rate": 1.871871871871872e-06,
"loss": 3.1582,
"step": 923
},
{
"epoch": 8.32,
"learning_rate": 1.861861861861862e-06,
"loss": 3.1016,
"step": 924
},
{
"epoch": 8.33,
"learning_rate": 1.8518518518518519e-06,
"loss": 2.8867,
"step": 925
},
{
"epoch": 8.34,
"learning_rate": 1.841841841841842e-06,
"loss": 3.0742,
"step": 926
},
{
"epoch": 8.35,
"learning_rate": 1.831831831831832e-06,
"loss": 3.1094,
"step": 927
},
{
"epoch": 8.36,
"learning_rate": 1.821821821821822e-06,
"loss": 3.0859,
"step": 928
},
{
"epoch": 8.37,
"learning_rate": 1.811811811811812e-06,
"loss": 2.9336,
"step": 929
},
{
"epoch": 8.38,
"learning_rate": 1.801801801801802e-06,
"loss": 3.084,
"step": 930
},
{
"epoch": 8.39,
"learning_rate": 1.7917917917917917e-06,
"loss": 3.3086,
"step": 931
},
{
"epoch": 8.4,
"learning_rate": 1.781781781781782e-06,
"loss": 3.0352,
"step": 932
},
{
"epoch": 8.41,
"learning_rate": 1.7717717717717719e-06,
"loss": 3.2168,
"step": 933
},
{
"epoch": 8.41,
"learning_rate": 1.7617617617617618e-06,
"loss": 3.2441,
"step": 934
},
{
"epoch": 8.42,
"learning_rate": 1.7517517517517518e-06,
"loss": 3.1504,
"step": 935
},
{
"epoch": 8.43,
"learning_rate": 1.7417417417417418e-06,
"loss": 3.1445,
"step": 936
},
{
"epoch": 8.44,
"learning_rate": 1.731731731731732e-06,
"loss": 3.0977,
"step": 937
},
{
"epoch": 8.45,
"learning_rate": 1.721721721721722e-06,
"loss": 3.0117,
"step": 938
},
{
"epoch": 8.46,
"learning_rate": 1.711711711711712e-06,
"loss": 3.1211,
"step": 939
},
{
"epoch": 8.47,
"learning_rate": 1.7017017017017019e-06,
"loss": 3.25,
"step": 940
},
{
"epoch": 8.48,
"learning_rate": 1.6916916916916916e-06,
"loss": 3.1152,
"step": 941
},
{
"epoch": 8.49,
"learning_rate": 1.681681681681682e-06,
"loss": 3.2031,
"step": 942
},
{
"epoch": 8.5,
"learning_rate": 1.6716716716716718e-06,
"loss": 3.207,
"step": 943
},
{
"epoch": 8.5,
"learning_rate": 1.6616616616616618e-06,
"loss": 3.1035,
"step": 944
},
{
"epoch": 8.51,
"learning_rate": 1.6516516516516517e-06,
"loss": 3.3203,
"step": 945
},
{
"epoch": 8.52,
"learning_rate": 1.6416416416416417e-06,
"loss": 3.1367,
"step": 946
},
{
"epoch": 8.53,
"learning_rate": 1.6316316316316317e-06,
"loss": 3.1328,
"step": 947
},
{
"epoch": 8.54,
"learning_rate": 1.6216216216216219e-06,
"loss": 2.9648,
"step": 948
},
{
"epoch": 8.55,
"learning_rate": 1.6116116116116118e-06,
"loss": 3.1465,
"step": 949
},
{
"epoch": 8.56,
"learning_rate": 1.6016016016016018e-06,
"loss": 3.0859,
"step": 950
},
{
"epoch": 8.57,
"learning_rate": 1.5915915915915916e-06,
"loss": 3.1348,
"step": 951
},
{
"epoch": 8.58,
"learning_rate": 1.5815815815815815e-06,
"loss": 2.9805,
"step": 952
},
{
"epoch": 8.59,
"learning_rate": 1.5715715715715717e-06,
"loss": 3.1348,
"step": 953
},
{
"epoch": 8.59,
"learning_rate": 1.5615615615615617e-06,
"loss": 3.0859,
"step": 954
},
{
"epoch": 8.6,
"learning_rate": 1.5515515515515517e-06,
"loss": 3.1016,
"step": 955
},
{
"epoch": 8.61,
"learning_rate": 1.5415415415415416e-06,
"loss": 3.3066,
"step": 956
},
{
"epoch": 8.62,
"learning_rate": 1.5315315315315316e-06,
"loss": 3.1855,
"step": 957
},
{
"epoch": 8.63,
"learning_rate": 1.5215215215215218e-06,
"loss": 3.0605,
"step": 958
},
{
"epoch": 8.64,
"learning_rate": 1.5115115115115118e-06,
"loss": 2.9766,
"step": 959
},
{
"epoch": 8.65,
"learning_rate": 1.5015015015015017e-06,
"loss": 2.9688,
"step": 960
},
{
"epoch": 8.66,
"learning_rate": 1.4914914914914915e-06,
"loss": 3.2266,
"step": 961
},
{
"epoch": 8.67,
"learning_rate": 1.4814814814814815e-06,
"loss": 3.0273,
"step": 962
},
{
"epoch": 8.68,
"learning_rate": 1.4714714714714714e-06,
"loss": 3.0,
"step": 963
},
{
"epoch": 8.68,
"learning_rate": 1.4614614614614616e-06,
"loss": 3.1621,
"step": 964
},
{
"epoch": 8.69,
"learning_rate": 1.4514514514514516e-06,
"loss": 3.2246,
"step": 965
},
{
"epoch": 8.7,
"learning_rate": 1.4414414414414416e-06,
"loss": 3.1602,
"step": 966
},
{
"epoch": 8.71,
"learning_rate": 1.4314314314314315e-06,
"loss": 2.9746,
"step": 967
},
{
"epoch": 8.72,
"learning_rate": 1.4214214214214215e-06,
"loss": 3.1367,
"step": 968
},
{
"epoch": 8.73,
"learning_rate": 1.4114114114114117e-06,
"loss": 3.0938,
"step": 969
},
{
"epoch": 8.74,
"learning_rate": 1.4014014014014016e-06,
"loss": 3.1406,
"step": 970
},
{
"epoch": 8.75,
"learning_rate": 1.3913913913913914e-06,
"loss": 2.9688,
"step": 971
},
{
"epoch": 8.76,
"learning_rate": 1.3813813813813814e-06,
"loss": 3.0723,
"step": 972
},
{
"epoch": 8.77,
"learning_rate": 1.3713713713713714e-06,
"loss": 3.0781,
"step": 973
},
{
"epoch": 8.77,
"learning_rate": 1.3613613613613615e-06,
"loss": 2.9102,
"step": 974
},
{
"epoch": 8.78,
"learning_rate": 1.3513513513513515e-06,
"loss": 3.0762,
"step": 975
},
{
"epoch": 8.79,
"learning_rate": 1.3413413413413415e-06,
"loss": 3.041,
"step": 976
},
{
"epoch": 8.8,
"learning_rate": 1.3313313313313315e-06,
"loss": 2.9746,
"step": 977
},
{
"epoch": 8.81,
"learning_rate": 1.3213213213213214e-06,
"loss": 3.1836,
"step": 978
},
{
"epoch": 8.82,
"learning_rate": 1.3113113113113112e-06,
"loss": 3.0781,
"step": 979
},
{
"epoch": 8.83,
"learning_rate": 1.3013013013013016e-06,
"loss": 3.3047,
"step": 980
},
{
"epoch": 8.84,
"learning_rate": 1.2912912912912913e-06,
"loss": 3.1387,
"step": 981
},
{
"epoch": 8.85,
"learning_rate": 1.2812812812812813e-06,
"loss": 2.9531,
"step": 982
},
{
"epoch": 8.86,
"learning_rate": 1.2712712712712713e-06,
"loss": 2.9512,
"step": 983
},
{
"epoch": 8.86,
"learning_rate": 1.2612612612612613e-06,
"loss": 3.125,
"step": 984
},
{
"epoch": 8.87,
"learning_rate": 1.2512512512512514e-06,
"loss": 3.2812,
"step": 985
},
{
"epoch": 8.88,
"learning_rate": 1.2412412412412414e-06,
"loss": 2.7305,
"step": 986
},
{
"epoch": 8.89,
"learning_rate": 1.2312312312312314e-06,
"loss": 3.1445,
"step": 987
},
{
"epoch": 8.9,
"learning_rate": 1.2212212212212213e-06,
"loss": 2.9609,
"step": 988
},
{
"epoch": 8.91,
"learning_rate": 1.2112112112112113e-06,
"loss": 3.3086,
"step": 989
},
{
"epoch": 8.92,
"learning_rate": 1.2012012012012013e-06,
"loss": 2.9453,
"step": 990
},
{
"epoch": 8.93,
"learning_rate": 1.1911911911911913e-06,
"loss": 3.2715,
"step": 991
},
{
"epoch": 8.94,
"learning_rate": 1.1811811811811812e-06,
"loss": 3.1953,
"step": 992
},
{
"epoch": 8.95,
"learning_rate": 1.1711711711711712e-06,
"loss": 3.3203,
"step": 993
},
{
"epoch": 8.95,
"learning_rate": 1.1611611611611614e-06,
"loss": 3.2871,
"step": 994
},
{
"epoch": 8.96,
"learning_rate": 1.1511511511511512e-06,
"loss": 2.6816,
"step": 995
},
{
"epoch": 8.97,
"learning_rate": 1.1411411411411411e-06,
"loss": 3.0547,
"step": 996
},
{
"epoch": 8.98,
"learning_rate": 1.1311311311311313e-06,
"loss": 3.0898,
"step": 997
},
{
"epoch": 8.99,
"learning_rate": 1.1211211211211213e-06,
"loss": 3.1035,
"step": 998
},
{
"epoch": 9.0,
"learning_rate": 1.111111111111111e-06,
"loss": 3.1172,
"step": 999
},
{
"epoch": 9.01,
"learning_rate": 1.1011011011011012e-06,
"loss": 3.0332,
"step": 1000
},
{
"epoch": 9.02,
"learning_rate": 1.0910910910910912e-06,
"loss": 3.1582,
"step": 1001
},
{
"epoch": 9.03,
"learning_rate": 1.0810810810810812e-06,
"loss": 3.0605,
"step": 1002
},
{
"epoch": 9.04,
"learning_rate": 1.0710710710710711e-06,
"loss": 3.0527,
"step": 1003
},
{
"epoch": 9.05,
"learning_rate": 1.0610610610610611e-06,
"loss": 3.0352,
"step": 1004
},
{
"epoch": 9.05,
"learning_rate": 1.051051051051051e-06,
"loss": 3.043,
"step": 1005
},
{
"epoch": 9.06,
"learning_rate": 1.041041041041041e-06,
"loss": 3.0273,
"step": 1006
},
{
"epoch": 9.07,
"learning_rate": 1.031031031031031e-06,
"loss": 3.2676,
"step": 1007
},
{
"epoch": 9.08,
"learning_rate": 1.0210210210210212e-06,
"loss": 3.2715,
"step": 1008
},
{
"epoch": 9.09,
"learning_rate": 1.011011011011011e-06,
"loss": 3.1211,
"step": 1009
},
{
"epoch": 9.1,
"learning_rate": 1.0010010010010011e-06,
"loss": 3.2715,
"step": 1010
},
{
"epoch": 9.11,
"learning_rate": 9.909909909909911e-07,
"loss": 3.25,
"step": 1011
},
{
"epoch": 9.12,
"learning_rate": 9.80980980980981e-07,
"loss": 3.0371,
"step": 1012
},
{
"epoch": 9.13,
"learning_rate": 9.70970970970971e-07,
"loss": 3.0762,
"step": 1013
},
{
"epoch": 9.14,
"learning_rate": 9.60960960960961e-07,
"loss": 2.8555,
"step": 1014
},
{
"epoch": 9.14,
"learning_rate": 9.50950950950951e-07,
"loss": 3.2031,
"step": 1015
},
{
"epoch": 9.15,
"learning_rate": 9.409409409409411e-07,
"loss": 2.9023,
"step": 1016
},
{
"epoch": 9.16,
"learning_rate": 9.30930930930931e-07,
"loss": 3.1836,
"step": 1017
},
{
"epoch": 9.17,
"learning_rate": 9.20920920920921e-07,
"loss": 2.9141,
"step": 1018
},
{
"epoch": 9.18,
"learning_rate": 9.10910910910911e-07,
"loss": 3.2656,
"step": 1019
},
{
"epoch": 9.19,
"learning_rate": 9.00900900900901e-07,
"loss": 2.998,
"step": 1020
},
{
"epoch": 9.2,
"learning_rate": 8.90890890890891e-07,
"loss": 3.0957,
"step": 1021
},
{
"epoch": 9.21,
"learning_rate": 8.808808808808809e-07,
"loss": 3.1465,
"step": 1022
},
{
"epoch": 9.22,
"learning_rate": 8.708708708708709e-07,
"loss": 3.0469,
"step": 1023
},
{
"epoch": 9.23,
"learning_rate": 8.60860860860861e-07,
"loss": 3.0664,
"step": 1024
},
{
"epoch": 9.23,
"learning_rate": 8.508508508508509e-07,
"loss": 3.1445,
"step": 1025
},
{
"epoch": 9.24,
"learning_rate": 8.40840840840841e-07,
"loss": 3.1855,
"step": 1026
},
{
"epoch": 9.25,
"learning_rate": 8.308308308308309e-07,
"loss": 3.2539,
"step": 1027
},
{
"epoch": 9.26,
"learning_rate": 8.208208208208208e-07,
"loss": 3.3203,
"step": 1028
},
{
"epoch": 9.27,
"learning_rate": 8.108108108108109e-07,
"loss": 3.0859,
"step": 1029
},
{
"epoch": 9.28,
"learning_rate": 8.008008008008009e-07,
"loss": 2.9492,
"step": 1030
},
{
"epoch": 9.29,
"learning_rate": 7.907907907907908e-07,
"loss": 3.3203,
"step": 1031
},
{
"epoch": 9.3,
"learning_rate": 7.807807807807808e-07,
"loss": 3.3242,
"step": 1032
},
{
"epoch": 9.31,
"learning_rate": 7.707707707707708e-07,
"loss": 3.2148,
"step": 1033
},
{
"epoch": 9.32,
"learning_rate": 7.607607607607609e-07,
"loss": 3.2109,
"step": 1034
},
{
"epoch": 9.32,
"learning_rate": 7.507507507507509e-07,
"loss": 3.1191,
"step": 1035
},
{
"epoch": 9.33,
"learning_rate": 7.407407407407407e-07,
"loss": 2.9668,
"step": 1036
},
{
"epoch": 9.34,
"learning_rate": 7.307307307307308e-07,
"loss": 2.9512,
"step": 1037
},
{
"epoch": 9.35,
"learning_rate": 7.207207207207208e-07,
"loss": 3.1797,
"step": 1038
},
{
"epoch": 9.36,
"learning_rate": 7.107107107107107e-07,
"loss": 3.2422,
"step": 1039
},
{
"epoch": 9.37,
"learning_rate": 7.007007007007008e-07,
"loss": 3.6113,
"step": 1040
},
{
"epoch": 9.38,
"learning_rate": 6.906906906906907e-07,
"loss": 3.0977,
"step": 1041
},
{
"epoch": 9.39,
"learning_rate": 6.806806806806808e-07,
"loss": 3.1602,
"step": 1042
},
{
"epoch": 9.4,
"learning_rate": 6.706706706706707e-07,
"loss": 3.0625,
"step": 1043
},
{
"epoch": 9.41,
"learning_rate": 6.606606606606607e-07,
"loss": 3.2148,
"step": 1044
},
{
"epoch": 9.41,
"learning_rate": 6.506506506506508e-07,
"loss": 3.1816,
"step": 1045
},
{
"epoch": 9.42,
"learning_rate": 6.406406406406407e-07,
"loss": 2.9043,
"step": 1046
},
{
"epoch": 9.43,
"learning_rate": 6.306306306306306e-07,
"loss": 2.9688,
"step": 1047
},
{
"epoch": 9.44,
"learning_rate": 6.206206206206207e-07,
"loss": 2.9297,
"step": 1048
},
{
"epoch": 9.45,
"learning_rate": 6.106106106106107e-07,
"loss": 3.1797,
"step": 1049
},
{
"epoch": 9.46,
"learning_rate": 6.006006006006006e-07,
"loss": 3.1992,
"step": 1050
},
{
"epoch": 9.47,
"learning_rate": 5.905905905905906e-07,
"loss": 3.1641,
"step": 1051
},
{
"epoch": 9.48,
"learning_rate": 5.805805805805807e-07,
"loss": 3.1289,
"step": 1052
},
{
"epoch": 9.49,
"learning_rate": 5.705705705705706e-07,
"loss": 2.8652,
"step": 1053
},
{
"epoch": 9.5,
"learning_rate": 5.605605605605606e-07,
"loss": 2.9512,
"step": 1054
},
{
"epoch": 9.5,
"learning_rate": 5.505505505505506e-07,
"loss": 3.0781,
"step": 1055
},
{
"epoch": 9.51,
"learning_rate": 5.405405405405406e-07,
"loss": 3.0703,
"step": 1056
},
{
"epoch": 9.52,
"learning_rate": 5.305305305305306e-07,
"loss": 2.9766,
"step": 1057
},
{
"epoch": 9.53,
"learning_rate": 5.205205205205205e-07,
"loss": 3.2637,
"step": 1058
},
{
"epoch": 9.54,
"learning_rate": 5.105105105105106e-07,
"loss": 3.416,
"step": 1059
},
{
"epoch": 9.55,
"learning_rate": 5.005005005005006e-07,
"loss": 3.1777,
"step": 1060
},
{
"epoch": 9.56,
"learning_rate": 4.904904904904905e-07,
"loss": 3.2129,
"step": 1061
},
{
"epoch": 9.57,
"learning_rate": 4.804804804804805e-07,
"loss": 3.0586,
"step": 1062
},
{
"epoch": 9.58,
"learning_rate": 4.7047047047047054e-07,
"loss": 3.1465,
"step": 1063
},
{
"epoch": 9.59,
"learning_rate": 4.604604604604605e-07,
"loss": 3.1895,
"step": 1064
},
{
"epoch": 9.59,
"learning_rate": 4.504504504504505e-07,
"loss": 3.2168,
"step": 1065
},
{
"epoch": 9.6,
"learning_rate": 4.4044044044044046e-07,
"loss": 3.1816,
"step": 1066
},
{
"epoch": 9.61,
"learning_rate": 4.304304304304305e-07,
"loss": 2.998,
"step": 1067
},
{
"epoch": 9.62,
"learning_rate": 4.204204204204205e-07,
"loss": 2.959,
"step": 1068
},
{
"epoch": 9.63,
"learning_rate": 4.104104104104104e-07,
"loss": 3.0352,
"step": 1069
},
{
"epoch": 9.64,
"learning_rate": 4.0040040040040045e-07,
"loss": 2.9121,
"step": 1070
},
{
"epoch": 9.65,
"learning_rate": 3.903903903903904e-07,
"loss": 3.1992,
"step": 1071
},
{
"epoch": 9.66,
"learning_rate": 3.8038038038038044e-07,
"loss": 2.9648,
"step": 1072
},
{
"epoch": 9.67,
"learning_rate": 3.7037037037037036e-07,
"loss": 3.0879,
"step": 1073
},
{
"epoch": 9.68,
"learning_rate": 3.603603603603604e-07,
"loss": 3.0254,
"step": 1074
},
{
"epoch": 9.68,
"learning_rate": 3.503503503503504e-07,
"loss": 3.3438,
"step": 1075
},
{
"epoch": 9.69,
"learning_rate": 3.403403403403404e-07,
"loss": 2.9453,
"step": 1076
},
{
"epoch": 9.7,
"learning_rate": 3.3033033033033036e-07,
"loss": 3.0859,
"step": 1077
},
{
"epoch": 9.71,
"learning_rate": 3.2032032032032033e-07,
"loss": 3.1855,
"step": 1078
},
{
"epoch": 9.72,
"learning_rate": 3.1031031031031035e-07,
"loss": 3.1133,
"step": 1079
},
{
"epoch": 9.73,
"learning_rate": 3.003003003003003e-07,
"loss": 3.2695,
"step": 1080
},
{
"epoch": 9.74,
"learning_rate": 2.9029029029029035e-07,
"loss": 3.4102,
"step": 1081
},
{
"epoch": 9.75,
"learning_rate": 2.802802802802803e-07,
"loss": 3.25,
"step": 1082
},
{
"epoch": 9.76,
"learning_rate": 2.702702702702703e-07,
"loss": 3.3594,
"step": 1083
},
{
"epoch": 9.77,
"learning_rate": 2.6026026026026026e-07,
"loss": 2.8496,
"step": 1084
},
{
"epoch": 9.77,
"learning_rate": 2.502502502502503e-07,
"loss": 2.8184,
"step": 1085
},
{
"epoch": 9.78,
"learning_rate": 2.4024024024024026e-07,
"loss": 3.0254,
"step": 1086
},
{
"epoch": 9.79,
"learning_rate": 2.3023023023023026e-07,
"loss": 3.3359,
"step": 1087
},
{
"epoch": 9.8,
"learning_rate": 2.2022022022022023e-07,
"loss": 3.1543,
"step": 1088
},
{
"epoch": 9.81,
"learning_rate": 2.1021021021021025e-07,
"loss": 3.2344,
"step": 1089
},
{
"epoch": 9.82,
"learning_rate": 2.0020020020020022e-07,
"loss": 3.4766,
"step": 1090
},
{
"epoch": 9.83,
"learning_rate": 1.9019019019019022e-07,
"loss": 2.9961,
"step": 1091
},
{
"epoch": 9.84,
"learning_rate": 1.801801801801802e-07,
"loss": 3.3457,
"step": 1092
},
{
"epoch": 9.85,
"learning_rate": 1.701701701701702e-07,
"loss": 3.0371,
"step": 1093
},
{
"epoch": 9.86,
"learning_rate": 1.6016016016016016e-07,
"loss": 3.1602,
"step": 1094
},
{
"epoch": 9.86,
"learning_rate": 1.5015015015015016e-07,
"loss": 3.2129,
"step": 1095
},
{
"epoch": 9.87,
"learning_rate": 1.4014014014014016e-07,
"loss": 3.0781,
"step": 1096
},
{
"epoch": 9.88,
"learning_rate": 1.3013013013013013e-07,
"loss": 3.0586,
"step": 1097
},
{
"epoch": 9.89,
"learning_rate": 1.2012012012012013e-07,
"loss": 3.0957,
"step": 1098
},
{
"epoch": 9.9,
"learning_rate": 1.1011011011011011e-07,
"loss": 3.0625,
"step": 1099
},
{
"epoch": 9.91,
"learning_rate": 1.0010010010010011e-07,
"loss": 3.1738,
"step": 1100
},
{
"epoch": 9.92,
"learning_rate": 9.00900900900901e-08,
"loss": 3.3105,
"step": 1101
},
{
"epoch": 9.93,
"learning_rate": 8.008008008008008e-08,
"loss": 3.1055,
"step": 1102
},
{
"epoch": 9.94,
"learning_rate": 7.007007007007008e-08,
"loss": 3.3457,
"step": 1103
},
{
"epoch": 9.95,
"learning_rate": 6.006006006006006e-08,
"loss": 3.0449,
"step": 1104
},
{
"epoch": 9.95,
"learning_rate": 5.0050050050050056e-08,
"loss": 2.9277,
"step": 1105
},
{
"epoch": 9.96,
"learning_rate": 4.004004004004004e-08,
"loss": 3.2109,
"step": 1106
},
{
"epoch": 9.97,
"learning_rate": 3.003003003003003e-08,
"loss": 2.8184,
"step": 1107
},
{
"epoch": 9.98,
"learning_rate": 2.002002002002002e-08,
"loss": 3.0859,
"step": 1108
},
{
"epoch": 9.99,
"learning_rate": 1.001001001001001e-08,
"loss": 3.0234,
"step": 1109
},
{
"epoch": 10.0,
"learning_rate": 0.0,
"loss": 3.0234,
"step": 1110
},
{
"epoch": 10.0,
"step": 1110,
"total_flos": 6.08245709317079e+20,
"train_loss": 3.1787637246621623,
"train_runtime": 4372.4031,
"train_samples_per_second": 261.293,
"train_steps_per_second": 0.254
}
],
"max_steps": 1110,
"num_train_epochs": 10,
"total_flos": 6.08245709317079e+20,
"trial_name": null,
"trial_params": null
}