{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 1035,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5.76923076923077e-07, "loss": 5.9375, "step": 1 },
    { "epoch": 0.01, "learning_rate": 1.153846153846154e-06, "loss": 5.9727, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.7307692307692308e-06, "loss": 5.8477, "step": 3 },
    { "epoch": 0.01, "learning_rate": 2.307692307692308e-06, "loss": 5.875, "step": 4 },
    { "epoch": 0.01, "learning_rate": 2.884615384615385e-06, "loss": 5.8477, "step": 5 },
    { "epoch": 0.02, "learning_rate": 3.4615384615384617e-06, "loss": 5.7422, "step": 6 },
    { "epoch": 0.02, "learning_rate": 4.0384615384615385e-06, "loss": 5.918, "step": 7 },
    { "epoch": 0.02, "learning_rate": 4.615384615384616e-06, "loss": 5.7539, "step": 8 },
    { "epoch": 0.03, "learning_rate": 5.192307692307692e-06, "loss": 5.7891, "step": 9 },
    { "epoch": 0.03, "learning_rate": 5.76923076923077e-06, "loss": 5.6055, "step": 10 },
    { "epoch": 0.03, "learning_rate": 6.3461538461538466e-06, "loss": 5.6523, "step": 11 },
    { "epoch": 0.03, "learning_rate": 6.923076923076923e-06, "loss": 5.6758, "step": 12 },
    { "epoch": 0.04, "learning_rate": 7.5e-06, "loss": 5.3789, "step": 13 },
    { "epoch": 0.04, "learning_rate": 8.076923076923077e-06, "loss": 5.3594, "step": 14 },
    { "epoch": 0.04, "learning_rate": 8.653846153846153e-06, "loss": 5.0781, "step": 15 },
    { "epoch": 0.05, "learning_rate": 9.230769230769232e-06, "loss": 5.0273, "step": 16 },
    { "epoch": 0.05, "learning_rate": 9.807692307692308e-06, "loss": 4.9609, "step": 17 },
    { "epoch": 0.05, "learning_rate": 1.0384615384615384e-05, "loss": 4.6562, "step": 18 },
    { "epoch": 0.06, "learning_rate": 1.0961538461538462e-05, "loss": 4.8047, "step": 19 },
    { "epoch": 0.06, "learning_rate": 1.153846153846154e-05, "loss": 4.7734, "step": 20 },
    { "epoch": 0.06, "learning_rate": 1.2115384615384615e-05, "loss": 4.3438, "step": 21 },
    { "epoch": 0.06, "learning_rate": 1.2692307692307693e-05, "loss": 4.4531, "step": 22 },
    { "epoch": 0.07, "learning_rate": 1.3269230769230769e-05, "loss": 4.8125, "step": 23 },
    { "epoch": 0.07, "learning_rate": 1.3846153846153847e-05, "loss": 4.2539, "step": 24 },
    { "epoch": 0.07, "learning_rate": 1.4423076923076924e-05, "loss": 4.4336, "step": 25 },
    { "epoch": 0.08, "learning_rate": 1.5e-05, "loss": 3.9082, "step": 26 },
    { "epoch": 0.08, "learning_rate": 1.557692307692308e-05, "loss": 3.9883, "step": 27 },
    { "epoch": 0.08, "learning_rate": 1.6153846153846154e-05, "loss": 4.293, "step": 28 },
    { "epoch": 0.08, "learning_rate": 1.673076923076923e-05, "loss": 3.9648, "step": 29 },
    { "epoch": 0.09, "learning_rate": 1.7307692307692306e-05, "loss": 3.5488, "step": 30 },
    { "epoch": 0.09, "learning_rate": 1.7884615384615384e-05, "loss": 3.9082, "step": 31 },
    { "epoch": 0.09, "learning_rate": 1.8461538461538465e-05, "loss": 4.2891, "step": 32 },
    { "epoch": 0.1, "learning_rate": 1.903846153846154e-05, "loss": 3.3262, "step": 33 },
    { "epoch": 0.1, "learning_rate": 1.9615384615384617e-05, "loss": 2.9336, "step": 34 },
    { "epoch": 0.1, "learning_rate": 2.0192307692307694e-05, "loss": 2.709, "step": 35 },
    { "epoch": 0.1, "learning_rate": 2.076923076923077e-05, "loss": 3.2617, "step": 36 },
    { "epoch": 0.11, "learning_rate": 2.1346153846153846e-05, "loss": 2.2539, "step": 37 },
    { "epoch": 0.11, "learning_rate": 2.1923076923076924e-05, "loss": 2.6367, "step": 38 },
    { "epoch": 0.11, "learning_rate": 2.25e-05, "loss": 2.4766, "step": 39 },
    { "epoch": 0.12, "learning_rate": 2.307692307692308e-05, "loss": 2.7012, "step": 40 },
    { "epoch": 0.12, "learning_rate": 2.3653846153846153e-05, "loss": 1.6719, "step": 41 },
    { "epoch": 0.12, "learning_rate": 2.423076923076923e-05, "loss": 2.0742, "step": 42 },
    { "epoch": 0.12, "learning_rate": 2.4807692307692305e-05, "loss": 1.9346, "step": 43 },
    { "epoch": 0.13, "learning_rate": 2.5384615384615386e-05, "loss": 1.8262, "step": 44 },
    { "epoch": 0.13, "learning_rate": 2.5961538461538464e-05, "loss": 2.4004, "step": 45 },
    { "epoch": 0.13, "learning_rate": 2.6538461538461538e-05, "loss": 2.252, "step": 46 },
    { "epoch": 0.14, "learning_rate": 2.7115384615384616e-05, "loss": 1.9297, "step": 47 },
    { "epoch": 0.14, "learning_rate": 2.7692307692307694e-05, "loss": 3.1191, "step": 48 },
    { "epoch": 0.14, "learning_rate": 2.8269230769230768e-05, "loss": 2.5312, "step": 49 },
    { "epoch": 0.14, "learning_rate": 2.884615384615385e-05, "loss": 1.4062, "step": 50 },
    { "epoch": 0.15, "learning_rate": 2.9423076923076923e-05, "loss": 1.6602, "step": 51 },
    { "epoch": 0.15, "learning_rate": 3e-05, "loss": 2.168, "step": 52 },
    { "epoch": 0.15, "learning_rate": 3.057692307692308e-05, "loss": 2.4062, "step": 53 },
    { "epoch": 0.16, "learning_rate": 3.115384615384616e-05, "loss": 2.0215, "step": 54 },
    { "epoch": 0.16, "learning_rate": 3.1730769230769234e-05, "loss": 1.752, "step": 55 },
    { "epoch": 0.16, "learning_rate": 3.230769230769231e-05, "loss": 1.8105, "step": 56 },
    { "epoch": 0.17, "learning_rate": 3.288461538461539e-05, "loss": 1.4023, "step": 57 },
    { "epoch": 0.17, "learning_rate": 3.346153846153846e-05, "loss": 1.5723, "step": 58 },
    { "epoch": 0.17, "learning_rate": 3.403846153846154e-05, "loss": 2.3945, "step": 59 },
    { "epoch": 0.17, "learning_rate": 3.461538461538461e-05, "loss": 1.1475, "step": 60 },
    { "epoch": 0.18, "learning_rate": 3.519230769230769e-05, "loss": 1.2188, "step": 61 },
    { "epoch": 0.18, "learning_rate": 3.576923076923077e-05, "loss": 1.4512, "step": 62 },
    { "epoch": 0.18, "learning_rate": 3.634615384615384e-05, "loss": 1.5293, "step": 63 },
    { "epoch": 0.19, "learning_rate": 3.692307692307693e-05, "loss": 1.8613, "step": 64 },
    { "epoch": 0.19, "learning_rate": 3.7500000000000003e-05, "loss": 1.9375, "step": 65 },
    { "epoch": 0.19, "learning_rate": 3.807692307692308e-05, "loss": 2.5215, "step": 66 },
    { "epoch": 0.19, "learning_rate": 3.865384615384616e-05, "loss": 1.6035, "step": 67 },
    { "epoch": 0.2, "learning_rate": 3.923076923076923e-05, "loss": 1.8506, "step": 68 },
    { "epoch": 0.2, "learning_rate": 3.980769230769231e-05, "loss": 1.4258, "step": 69 },
    { "epoch": 0.2, "learning_rate": 4.038461538461539e-05, "loss": 1.1777, "step": 70 },
    { "epoch": 0.21, "learning_rate": 4.096153846153846e-05, "loss": 2.127, "step": 71 },
    { "epoch": 0.21, "learning_rate": 4.153846153846154e-05, "loss": 2.2969, "step": 72 },
    { "epoch": 0.21, "learning_rate": 4.211538461538461e-05, "loss": 1.6338, "step": 73 },
    { "epoch": 0.21, "learning_rate": 4.269230769230769e-05, "loss": 1.9004, "step": 74 },
    { "epoch": 0.22, "learning_rate": 4.3269230769230766e-05, "loss": 1.5615, "step": 75 },
    { "epoch": 0.22, "learning_rate": 4.384615384615385e-05, "loss": 2.1914, "step": 76 },
    { "epoch": 0.22, "learning_rate": 4.442307692307693e-05, "loss": 1.5537, "step": 77 },
    { "epoch": 0.23, "learning_rate": 4.5e-05, "loss": 0.98, "step": 78 },
    { "epoch": 0.23, "learning_rate": 4.557692307692308e-05, "loss": 1.8662, "step": 79 },
    { "epoch": 0.23, "learning_rate": 4.615384615384616e-05, "loss": 1.4648, "step": 80 },
    { "epoch": 0.23, "learning_rate": 4.673076923076923e-05, "loss": 0.8438, "step": 81 },
    { "epoch": 0.24, "learning_rate": 4.730769230769231e-05, "loss": 2.1777, "step": 82 },
    { "epoch": 0.24, "learning_rate": 4.788461538461539e-05, "loss": 1.8066, "step": 83 },
    { "epoch": 0.24, "learning_rate": 4.846153846153846e-05, "loss": 0.9365, "step": 84 },
    { "epoch": 0.25, "learning_rate": 4.9038461538461536e-05, "loss": 1.8799, "step": 85 },
    { "epoch": 0.25, "learning_rate": 4.961538461538461e-05, "loss": 2.0762, "step": 86 },
    { "epoch": 0.25, "learning_rate": 5.01923076923077e-05, "loss": 1.6475, "step": 87 },
    { "epoch": 0.26, "learning_rate": 5.076923076923077e-05, "loss": 1.291, "step": 88 },
    { "epoch": 0.26, "learning_rate": 5.134615384615385e-05, "loss": 1.1172, "step": 89 },
    { "epoch": 0.26, "learning_rate": 5.192307692307693e-05, "loss": 1.5508, "step": 90 },
    { "epoch": 0.26, "learning_rate": 5.25e-05, "loss": 1.4297, "step": 91 },
    { "epoch": 0.27, "learning_rate": 5.3076923076923076e-05, "loss": 1.1201, "step": 92 },
    { "epoch": 0.27, "learning_rate": 5.365384615384616e-05, "loss": 1.5566, "step": 93 },
    { "epoch": 0.27, "learning_rate": 5.423076923076923e-05, "loss": 1.3662, "step": 94 },
    { "epoch": 0.28, "learning_rate": 5.4807692307692306e-05, "loss": 0.4919, "step": 95 },
    { "epoch": 0.28, "learning_rate": 5.538461538461539e-05, "loss": 1.7178, "step": 96 },
    { "epoch": 0.28, "learning_rate": 5.596153846153846e-05, "loss": 1.8799, "step": 97 },
    { "epoch": 0.28, "learning_rate": 5.6538461538461536e-05, "loss": 1.291, "step": 98 },
    { "epoch": 0.29, "learning_rate": 5.711538461538462e-05, "loss": 1.6123, "step": 99 },
    { "epoch": 0.29, "learning_rate": 5.76923076923077e-05, "loss": 0.8276, "step": 100 },
    { "epoch": 0.29, "learning_rate": 5.826923076923077e-05, "loss": 1.7314, "step": 101 },
    { "epoch": 0.3, "learning_rate": 5.8846153846153846e-05, "loss": 1.7471, "step": 102 },
    { "epoch": 0.3, "learning_rate": 5.942307692307693e-05, "loss": 0.7832, "step": 103 },
    { "epoch": 0.3, "learning_rate": 6e-05, "loss": 0.6196, "step": 104 },
    { "epoch": 0.3, "learning_rate": 5.993555316863588e-05, "loss": 1.041, "step": 105 },
    { "epoch": 0.31, "learning_rate": 5.987110633727175e-05, "loss": 2.1406, "step": 106 },
    { "epoch": 0.31, "learning_rate": 5.9806659505907625e-05, "loss": 1.7354, "step": 107 },
    { "epoch": 0.31, "learning_rate": 5.974221267454351e-05, "loss": 1.0576, "step": 108 },
    { "epoch": 0.32, "learning_rate": 5.967776584317938e-05, "loss": 2.1211, "step": 109 },
    { "epoch": 0.32, "learning_rate": 5.9613319011815255e-05, "loss": 0.9023, "step": 110 },
    { "epoch": 0.32, "learning_rate": 5.9548872180451125e-05, "loss": 3.0996, "step": 111 },
    { "epoch": 0.32, "learning_rate": 5.9484425349087e-05, "loss": 0.9351, "step": 112 },
    { "epoch": 0.33, "learning_rate": 5.941997851772288e-05, "loss": 1.3848, "step": 113 },
    { "epoch": 0.33, "learning_rate": 5.9355531686358755e-05, "loss": 2.2637, "step": 114 },
    { "epoch": 0.33, "learning_rate": 5.929108485499463e-05, "loss": 1.5088, "step": 115 },
    { "epoch": 0.34, "learning_rate": 5.922663802363051e-05, "loss": 1.9033, "step": 116 },
    { "epoch": 0.34, "learning_rate": 5.916219119226638e-05, "loss": 1.3701, "step": 117 },
    { "epoch": 0.34, "learning_rate": 5.9097744360902255e-05, "loss": 1.3613, "step": 118 },
    { "epoch": 0.34, "learning_rate": 5.903329752953813e-05, "loss": 1.418, "step": 119 },
    { "epoch": 0.35, "learning_rate": 5.896885069817401e-05, "loss": 1.0449, "step": 120 },
    { "epoch": 0.35, "learning_rate": 5.8904403866809885e-05, "loss": 0.811, "step": 121 },
    { "epoch": 0.35, "learning_rate": 5.883995703544576e-05, "loss": 1.3867, "step": 122 },
    { "epoch": 0.36, "learning_rate": 5.877551020408163e-05, "loss": 1.3105, "step": 123 },
    { "epoch": 0.36, "learning_rate": 5.871106337271751e-05, "loss": 1.8262, "step": 124 },
    { "epoch": 0.36, "learning_rate": 5.864661654135338e-05, "loss": 0.9292, "step": 125 },
    { "epoch": 0.37, "learning_rate": 5.858216970998926e-05, "loss": 1.0, "step": 126 },
    { "epoch": 0.37, "learning_rate": 5.851772287862514e-05, "loss": 1.6064, "step": 127 },
    { "epoch": 0.37, "learning_rate": 5.845327604726101e-05, "loss": 1.2256, "step": 128 },
    { "epoch": 0.37, "learning_rate": 5.8388829215896886e-05, "loss": 0.7412, "step": 129 },
    { "epoch": 0.38, "learning_rate": 5.832438238453276e-05, "loss": 0.7402, "step": 130 },
    { "epoch": 0.38, "learning_rate": 5.825993555316863e-05, "loss": 1.2227, "step": 131 },
    { "epoch": 0.38, "learning_rate": 5.8195488721804516e-05, "loss": 1.2969, "step": 132 },
    { "epoch": 0.39, "learning_rate": 5.813104189044039e-05, "loss": 1.7881, "step": 133 },
    { "epoch": 0.39, "learning_rate": 5.806659505907626e-05, "loss": 1.3223, "step": 134 },
    { "epoch": 0.39, "learning_rate": 5.800214822771214e-05, "loss": 0.8848, "step": 135 },
    { "epoch": 0.39, "learning_rate": 5.7937701396348016e-05, "loss": 0.8428, "step": 136 },
    { "epoch": 0.4, "learning_rate": 5.7873254564983886e-05, "loss": 0.8032, "step": 137 },
    { "epoch": 0.4, "learning_rate": 5.780880773361977e-05, "loss": 1.1318, "step": 138 },
    { "epoch": 0.4, "learning_rate": 5.774436090225564e-05, "loss": 2.0195, "step": 139 },
    { "epoch": 0.41, "learning_rate": 5.7679914070891516e-05, "loss": 1.4062, "step": 140 },
    { "epoch": 0.41, "learning_rate": 5.761546723952739e-05, "loss": 0.9395, "step": 141 },
    { "epoch": 0.41, "learning_rate": 5.755102040816326e-05, "loss": 1.5361, "step": 142 },
    { "epoch": 0.41, "learning_rate": 5.748657357679914e-05, "loss": 1.4014, "step": 143 },
    { "epoch": 0.42, "learning_rate": 5.742212674543502e-05, "loss": 0.6055, "step": 144 },
    { "epoch": 0.42, "learning_rate": 5.735767991407089e-05, "loss": 1.3877, "step": 145 },
    { "epoch": 0.42, "learning_rate": 5.729323308270677e-05, "loss": 0.6528, "step": 146 },
    { "epoch": 0.43, "learning_rate": 5.7228786251342646e-05, "loss": 0.7373, "step": 147 },
    { "epoch": 0.43, "learning_rate": 5.7164339419978516e-05, "loss": 0.8423, "step": 148 },
    { "epoch": 0.43, "learning_rate": 5.709989258861439e-05, "loss": 1.3916, "step": 149 },
    { "epoch": 0.43, "learning_rate": 5.7035445757250276e-05, "loss": 0.9614, "step": 150 },
    { "epoch": 0.44, "learning_rate": 5.6970998925886146e-05, "loss": 1.1592, "step": 151 },
    { "epoch": 0.44, "learning_rate": 5.690655209452202e-05, "loss": 0.8711, "step": 152 },
    { "epoch": 0.44, "learning_rate": 5.684210526315789e-05, "loss": 1.3662, "step": 153 },
    { "epoch": 0.45, "learning_rate": 5.677765843179377e-05, "loss": 0.4143, "step": 154 },
    { "epoch": 0.45, "learning_rate": 5.6713211600429646e-05, "loss": 1.1768, "step": 155 },
    { "epoch": 0.45, "learning_rate": 5.6648764769065516e-05, "loss": 1.291, "step": 156 },
    { "epoch": 0.46, "learning_rate": 5.65843179377014e-05, "loss": 1.0244, "step": 157 },
    { "epoch": 0.46, "learning_rate": 5.6519871106337277e-05, "loss": 1.0244, "step": 158 },
    { "epoch": 0.46, "learning_rate": 5.6455424274973147e-05, "loss": 1.0889, "step": 159 },
    { "epoch": 0.46, "learning_rate": 5.639097744360902e-05, "loss": 1.4307, "step": 160 },
    { "epoch": 0.47, "learning_rate": 5.63265306122449e-05, "loss": 2.4375, "step": 161 },
    { "epoch": 0.47, "learning_rate": 5.626208378088077e-05, "loss": 0.5796, "step": 162 },
    { "epoch": 0.47, "learning_rate": 5.619763694951665e-05, "loss": 0.6992, "step": 163 },
    { "epoch": 0.48, "learning_rate": 5.613319011815253e-05, "loss": 1.3711, "step": 164 },
    { "epoch": 0.48, "learning_rate": 5.60687432867884e-05, "loss": 0.7881, "step": 165 },
    { "epoch": 0.48, "learning_rate": 5.600429645542428e-05, "loss": 2.0098, "step": 166 },
    { "epoch": 0.48, "learning_rate": 5.593984962406015e-05, "loss": 0.9419, "step": 167 },
    { "epoch": 0.49, "learning_rate": 5.5875402792696023e-05, "loss": 1.5947, "step": 168 },
    { "epoch": 0.49, "learning_rate": 5.581095596133191e-05, "loss": 1.7793, "step": 169 },
    { "epoch": 0.49, "learning_rate": 5.574650912996778e-05, "loss": 0.7998, "step": 170 },
    { "epoch": 0.5, "learning_rate": 5.5682062298603654e-05, "loss": 1.2207, "step": 171 },
    { "epoch": 0.5, "learning_rate": 5.561761546723953e-05, "loss": 1.0303, "step": 172 },
    { "epoch": 0.5, "learning_rate": 5.55531686358754e-05, "loss": 1.2314, "step": 173 },
    { "epoch": 0.5, "learning_rate": 5.548872180451128e-05, "loss": 0.6343, "step": 174 },
    { "epoch": 0.51, "learning_rate": 5.542427497314716e-05, "loss": 0.981, "step": 175 },
    { "epoch": 0.51, "learning_rate": 5.535982814178303e-05, "loss": 1.9785, "step": 176 },
    { "epoch": 0.51, "learning_rate": 5.529538131041891e-05, "loss": 1.0107, "step": 177 },
    { "epoch": 0.52, "learning_rate": 5.5230934479054784e-05, "loss": 0.6196, "step": 178 },
    { "epoch": 0.52, "learning_rate": 5.5166487647690654e-05, "loss": 1.375, "step": 179 },
    { "epoch": 0.52, "learning_rate": 5.510204081632653e-05, "loss": 1.1621, "step": 180 },
    { "epoch": 0.52, "learning_rate": 5.503759398496241e-05, "loss": 1.2227, "step": 181 },
    { "epoch": 0.53, "learning_rate": 5.4973147153598284e-05, "loss": 1.2305, "step": 182 },
    { "epoch": 0.53, "learning_rate": 5.490870032223416e-05, "loss": 0.3601, "step": 183 },
    { "epoch": 0.53, "learning_rate": 5.484425349087003e-05, "loss": 0.6133, "step": 184 },
    { "epoch": 0.54, "learning_rate": 5.477980665950591e-05, "loss": 0.5396, "step": 185 },
    { "epoch": 0.54, "learning_rate": 5.4715359828141784e-05, "loss": 0.9004, "step": 186 },
    { "epoch": 0.54, "learning_rate": 5.465091299677766e-05, "loss": 1.3242, "step": 187 },
    { "epoch": 0.54, "learning_rate": 5.458646616541354e-05, "loss": 0.812, "step": 188 },
    { "epoch": 0.55, "learning_rate": 5.4522019334049414e-05, "loss": 1.8721, "step": 189 },
    { "epoch": 0.55, "learning_rate": 5.4457572502685284e-05, "loss": 0.6016, "step": 190 },
    { "epoch": 0.55, "learning_rate": 5.439312567132116e-05, "loss": 0.6221, "step": 191 },
    { "epoch": 0.56, "learning_rate": 5.432867883995704e-05, "loss": 1.2168, "step": 192 },
    { "epoch": 0.56, "learning_rate": 5.4264232008592914e-05, "loss": 1.8311, "step": 193 },
    { "epoch": 0.56, "learning_rate": 5.419978517722879e-05, "loss": 1.2402, "step": 194 },
    { "epoch": 0.57, "learning_rate": 5.413533834586466e-05, "loss": 1.415, "step": 195 },
    { "epoch": 0.57, "learning_rate": 5.407089151450054e-05, "loss": 1.042, "step": 196 },
    { "epoch": 0.57, "learning_rate": 5.4006444683136414e-05, "loss": 0.6216, "step": 197 },
    { "epoch": 0.57, "learning_rate": 5.3941997851772284e-05, "loss": 0.7007, "step": 198 },
    { "epoch": 0.58, "learning_rate": 5.387755102040817e-05, "loss": 0.5439, "step": 199 },
    { "epoch": 0.58, "learning_rate": 5.3813104189044045e-05, "loss": 1.8574, "step": 200 },
    { "epoch": 0.58, "learning_rate": 5.3748657357679914e-05, "loss": 1.1797, "step": 201 },
    { "epoch": 0.59, "learning_rate": 5.368421052631579e-05, "loss": 1.1494, "step": 202 },
    { "epoch": 0.59, "learning_rate": 5.361976369495167e-05, "loss": 1.1846, "step": 203 },
    { "epoch": 0.59, "learning_rate": 5.355531686358754e-05, "loss": 0.9653, "step": 204 },
    { "epoch": 0.59, "learning_rate": 5.3490870032223415e-05, "loss": 1.2695, "step": 205 },
    { "epoch": 0.6, "learning_rate": 5.34264232008593e-05, "loss": 1.083, "step": 206 },
    { "epoch": 0.6, "learning_rate": 5.336197636949517e-05, "loss": 2.4629, "step": 207 },
    { "epoch": 0.6, "learning_rate": 5.3297529538131045e-05, "loss": 1.0176, "step": 208 },
    { "epoch": 0.61, "learning_rate": 5.3233082706766915e-05, "loss": 1.6035, "step": 209 },
    { "epoch": 0.61, "learning_rate": 5.316863587540279e-05, "loss": 0.9111, "step": 210 },
    { "epoch": 0.61, "learning_rate": 5.310418904403867e-05, "loss": 0.6709, "step": 211 },
    { "epoch": 0.61, "learning_rate": 5.3039742212674545e-05, "loss": 0.792, "step": 212 },
    { "epoch": 0.62, "learning_rate": 5.297529538131042e-05, "loss": 0.3862, "step": 213 },
    { "epoch": 0.62, "learning_rate": 5.29108485499463e-05, "loss": 0.3044, "step": 214 },
    { "epoch": 0.62, "learning_rate": 5.284640171858217e-05, "loss": 0.7002, "step": 215 },
    { "epoch": 0.63, "learning_rate": 5.2781954887218045e-05, "loss": 0.4397, "step": 216 },
    { "epoch": 0.63, "learning_rate": 5.271750805585392e-05, "loss": 0.9385, "step": 217 },
    { "epoch": 0.63, "learning_rate": 5.26530612244898e-05, "loss": 0.7261, "step": 218 },
    { "epoch": 0.63, "learning_rate": 5.2588614393125675e-05, "loss": 0.853, "step": 219 },
    { "epoch": 0.64, "learning_rate": 5.252416756176155e-05, "loss": 0.9805, "step": 220 },
    { "epoch": 0.64, "learning_rate": 5.245972073039742e-05, "loss": 0.4788, "step": 221 },
    { "epoch": 0.64, "learning_rate": 5.23952738990333e-05, "loss": 1.4219, "step": 222 },
    { "epoch": 0.65, "learning_rate": 5.233082706766917e-05, "loss": 1.7207, "step": 223 },
    { "epoch": 0.65, "learning_rate": 5.226638023630505e-05, "loss": 1.7988, "step": 224 },
    { "epoch": 0.65, "learning_rate": 5.220193340494093e-05, "loss": 0.8208, "step": 225 },
    { "epoch": 0.66, "learning_rate": 5.21374865735768e-05, "loss": 1.1367, "step": 226 },
    { "epoch": 0.66, "learning_rate": 5.2073039742212675e-05, "loss": 0.6289, "step": 227 },
    { "epoch": 0.66, "learning_rate": 5.200859291084855e-05, "loss": 1.2578, "step": 228 },
    { "epoch": 0.66, "learning_rate": 5.194414607948442e-05, "loss": 0.9995, "step": 229 },
    { "epoch": 0.67, "learning_rate": 5.1879699248120305e-05, "loss": 1.6768, "step": 230 },
    { "epoch": 0.67, "learning_rate": 5.181525241675618e-05, "loss": 1.3379, "step": 231 },
    { "epoch": 0.67, "learning_rate": 5.175080558539205e-05, "loss": 1.3545, "step": 232 },
    { "epoch": 0.68, "learning_rate": 5.168635875402793e-05, "loss": 0.9443, "step": 233 },
    { "epoch": 0.68, "learning_rate": 5.1621911922663806e-05, "loss": 1.3398, "step": 234 },
    { "epoch": 0.68, "learning_rate": 5.1557465091299675e-05, "loss": 1.4756, "step": 235 },
    { "epoch": 0.68, "learning_rate": 5.149301825993556e-05, "loss": 0.7642, "step": 236 },
    { "epoch": 0.69, "learning_rate": 5.142857142857143e-05, "loss": 1.7676, "step": 237 },
    { "epoch": 0.69, "learning_rate": 5.1364124597207306e-05, "loss": 1.2217, "step": 238 },
    { "epoch": 0.69, "learning_rate": 5.129967776584318e-05, "loss": 0.7295, "step": 239 },
    { "epoch": 0.7, "learning_rate": 5.123523093447905e-05, "loss": 0.5938, "step": 240 },
    { "epoch": 0.7, "learning_rate": 5.117078410311493e-05, "loss": 0.212, "step": 241 },
    { "epoch": 0.7, "learning_rate": 5.110633727175081e-05, "loss": 1.0762, "step": 242 },
    { "epoch": 0.7, "learning_rate": 5.104189044038668e-05, "loss": 0.9961, "step": 243 },
    { "epoch": 0.71, "learning_rate": 5.097744360902256e-05, "loss": 0.7412, "step": 244 },
    { "epoch": 0.71, "learning_rate": 5.0912996777658436e-05, "loss": 1.1523, "step": 245 },
    { "epoch": 0.71, "learning_rate": 5.0848549946294306e-05, "loss": 1.7646, "step": 246 },
    { "epoch": 0.72, "learning_rate": 5.078410311493018e-05, "loss": 0.4404, "step": 247 },
    { "epoch": 0.72, "learning_rate": 5.071965628356606e-05, "loss": 1.2734, "step": 248 },
    { "epoch": 0.72, "learning_rate": 5.0655209452201936e-05, "loss": 0.21, "step": 249 },
    { "epoch": 0.72, "learning_rate": 5.059076262083781e-05, "loss": 0.7417, "step": 250 },
    { "epoch": 0.73, "learning_rate": 5.052631578947368e-05, "loss": 0.8511, "step": 251 },
    { "epoch": 0.73, "learning_rate": 5.046186895810956e-05, "loss": 1.2627, "step": 252 },
    { "epoch": 0.73, "learning_rate": 5.0397422126745436e-05, "loss": 0.4741, "step": 253 },
    { "epoch": 0.74, "learning_rate": 5.0332975295381306e-05, "loss": 0.5249, "step": 254 },
    { "epoch": 0.74, "learning_rate": 5.026852846401719e-05, "loss": 0.2087, "step": 255 },
    { "epoch": 0.74, "learning_rate": 5.0204081632653066e-05, "loss": 1.501, "step": 256 },
    { "epoch": 0.74, "learning_rate": 5.0139634801288936e-05, "loss": 0.5781, "step": 257 },
    { "epoch": 0.75, "learning_rate": 5.007518796992481e-05, "loss": 0.5508, "step": 258 },
    { "epoch": 0.75, "learning_rate": 5.001074113856069e-05, "loss": 0.1661, "step": 259 },
    { "epoch": 0.75, "learning_rate": 4.994629430719656e-05, "loss": 1.1377, "step": 260 },
    { "epoch": 0.76, "learning_rate": 4.988184747583244e-05, "loss": 1.2031, "step": 261 },
    { "epoch": 0.76, "learning_rate": 4.981740064446831e-05, "loss": 0.791, "step": 262 },
    { "epoch": 0.76, "learning_rate": 4.975295381310419e-05, "loss": 1.9609, "step": 263 },
    { "epoch": 0.77, "learning_rate": 4.9688506981740066e-05, "loss": 2.9258, "step": 264 },
    { "epoch": 0.77, "learning_rate": 4.9624060150375936e-05, "loss": 0.7656, "step": 265 },
    { "epoch": 0.77, "learning_rate": 4.955961331901181e-05, "loss": 1.4219, "step": 266 },
    { "epoch": 0.77, "learning_rate": 4.9495166487647697e-05, "loss": 1.3262, "step": 267 },
    { "epoch": 0.78, "learning_rate": 4.9430719656283567e-05, "loss": 0.8892, "step": 268 },
    { "epoch": 0.78, "learning_rate": 4.936627282491944e-05, "loss": 1.1738, "step": 269 },
    { "epoch": 0.78, "learning_rate": 4.930182599355532e-05, "loss": 1.0635, "step": 270 },
    { "epoch": 0.79, "learning_rate": 4.923737916219119e-05, "loss": 1.4307, "step": 271 },
    { "epoch": 0.79, "learning_rate": 4.917293233082707e-05, "loss": 1.1562, "step": 272 },
    { "epoch": 0.79, "learning_rate": 4.910848549946295e-05, "loss": 1.2158, "step": 273 },
    { "epoch": 0.79, "learning_rate": 4.904403866809882e-05, "loss": 0.9302, "step": 274 },
    { "epoch": 0.8, "learning_rate": 4.89795918367347e-05, "loss": 1.5469, "step": 275 },
    { "epoch": 0.8, "learning_rate": 4.891514500537057e-05, "loss": 1.6465, "step": 276 },
    { "epoch": 0.8, "learning_rate": 4.8850698174006443e-05, "loss": 1.1904, "step": 277 },
    { "epoch": 0.81, "learning_rate": 4.878625134264232e-05, "loss": 1.7793, "step": 278 },
    { "epoch": 0.81, "learning_rate": 4.87218045112782e-05, "loss": 0.7744, "step": 279 },
    { "epoch": 0.81, "learning_rate": 4.8657357679914074e-05, "loss": 0.3062, "step": 280 },
    { "epoch": 0.81, "learning_rate": 4.859291084854995e-05, "loss": 1.5322, "step": 281 },
    { "epoch": 0.82, "learning_rate": 4.852846401718582e-05, "loss": 1.9072, "step": 282 },
    { "epoch": 0.82, "learning_rate": 4.84640171858217e-05, "loss": 1.5801, "step": 283 },
    { "epoch": 0.82, "learning_rate": 4.8399570354457574e-05, "loss": 0.9766, "step": 284 },
    { "epoch": 0.83, "learning_rate": 4.833512352309345e-05, "loss": 0.8052, "step": 285 },
    { "epoch": 0.83, "learning_rate": 4.827067669172933e-05, "loss": 1.1963, "step": 286 },
    { "epoch": 0.83, "learning_rate": 4.8206229860365204e-05, "loss": 1.0469, "step": 287 },
    { "epoch": 0.83, "learning_rate": 4.8141783029001074e-05, "loss": 0.4531, "step": 288 },
    { "epoch": 0.84, "learning_rate": 4.807733619763695e-05, "loss": 0.5645, "step": 289 },
    { "epoch": 0.84, "learning_rate": 4.801288936627282e-05, "loss": 1.5752, "step": 290 },
    { "epoch": 0.84, "learning_rate": 4.7948442534908704e-05, "loss": 0.3367, "step": 291 },
    { "epoch": 0.85, "learning_rate": 4.788399570354458e-05, "loss": 0.8164, "step": 292 },
    { "epoch": 0.85, "learning_rate": 4.781954887218045e-05, "loss": 0.6523, "step": 293 },
    { "epoch": 0.85, "learning_rate": 4.775510204081633e-05, "loss": 1.0527, "step": 294 },
    { "epoch": 0.86, "learning_rate": 4.7690655209452204e-05, "loss": 1.1582, "step": 295 },
    { "epoch": 0.86, "learning_rate": 4.7626208378088074e-05, "loss": 0.853, "step": 296 },
    { "epoch": 0.86, "learning_rate": 4.756176154672396e-05, "loss": 1.2627, "step": 297 },
    { "epoch": 0.86, "learning_rate": 4.7497314715359834e-05, "loss": 0.4578, "step": 298 },
    { "epoch": 0.87, "learning_rate": 4.7432867883995704e-05, "loss": 1.1797, "step": 299 },
    { "epoch": 0.87, "learning_rate": 4.736842105263158e-05, "loss": 2.209, "step": 300 },
    { "epoch": 0.87, "learning_rate": 4.730397422126746e-05, "loss": 0.7031, "step": 301 },
    { "epoch": 0.88, "learning_rate": 4.723952738990333e-05, "loss": 0.7183, "step": 302 },
    { "epoch": 0.88, "learning_rate": 4.7175080558539204e-05, "loss": 1.2344, "step": 303 },
    { "epoch": 0.88, "learning_rate": 4.711063372717508e-05, "loss": 0.9141, "step": 304 },
    { "epoch": 0.88, "learning_rate": 4.704618689581096e-05, "loss": 0.6128, "step": 305 },
    { "epoch": 0.89, "learning_rate": 4.6981740064446834e-05, "loss": 0.3938, "step": 306 },
    { "epoch": 0.89, "learning_rate": 4.6917293233082704e-05, "loss": 0.522, "step": 307 },
    { "epoch": 0.89, "learning_rate": 4.685284640171858e-05, "loss": 0.6768, "step": 308 },
    { "epoch": 0.9, "learning_rate": 4.678839957035446e-05, "loss": 0.8828, "step": 309 },
    { "epoch": 0.9, "learning_rate": 4.6723952738990334e-05, "loss": 1.3838, "step": 310 },
    { "epoch": 0.9, "learning_rate": 4.665950590762621e-05, "loss": 0.6724, "step": 311 },
    { "epoch": 0.9, "learning_rate": 4.659505907626209e-05, "loss": 1.3271, "step": 312 },
    { "epoch": 0.91, "learning_rate": 4.653061224489796e-05, "loss": 1.1445, "step": 313 },
    { "epoch": 0.91, "learning_rate": 4.6466165413533835e-05, "loss": 0.7754, "step": 314 },
    { "epoch": 0.91, "learning_rate": 4.640171858216971e-05, "loss": 1.7266, "step": 315 },
    { "epoch": 0.92, "learning_rate": 4.633727175080559e-05, "loss": 0.9507, "step": 316 },
    { "epoch": 0.92, "learning_rate": 4.6272824919441465e-05, "loss": 0.4897, "step": 317 },
    { "epoch": 0.92, "learning_rate": 4.6208378088077335e-05, "loss": 1.0723, "step": 318 },
    { "epoch": 0.92, "learning_rate": 4.614393125671321e-05, "loss": 1.7969, "step": 319 },
    { "epoch": 0.93, "learning_rate": 4.607948442534909e-05, "loss": 1.0342, "step": 320 },
    { "epoch": 0.93, "learning_rate": 4.601503759398496e-05, "loss": 0.6543, "step": 321 },
    { "epoch": 0.93, "learning_rate": 4.595059076262084e-05, "loss": 1.0791, "step": 322 },
    { "epoch": 0.94, "learning_rate": 4.588614393125672e-05, "loss": 1.0928, "step": 323 },
    { "epoch": 0.94, "learning_rate": 4.582169709989259e-05, "loss": 1.6904, "step": 324 },
    { "epoch": 0.94, "learning_rate": 4.5757250268528465e-05, "loss": 1.0898, "step": 325 },
    { "epoch": 0.94, "learning_rate": 4.569280343716434e-05, "loss": 0.8174, "step": 326 },
    { "epoch": 0.95, "learning_rate": 4.562835660580021e-05, "loss": 0.5107, "step": 327 },
    { "epoch": 0.95, "learning_rate": 4.5563909774436095e-05, "loss": 0.6909, "step": 328 },
    { "epoch": 0.95, "learning_rate": 4.549946294307197e-05, "loss": 0.9688, "step": 329 },
    { "epoch": 0.96, "learning_rate": 4.543501611170784e-05, "loss": 1.0, "step": 330 },
    { "epoch": 0.96, "learning_rate": 4.537056928034372e-05, "loss": 0.4773, "step": 331 },
    { "epoch": 0.96, "learning_rate": 4.530612244897959e-05, "loss": 1.3174, "step": 332 },
    { "epoch": 0.97, "learning_rate": 4.5241675617615465e-05, "loss": 1.3652, "step": 333 },
    { "epoch": 0.97, "learning_rate": 4.517722878625135e-05, "loss": 1.2168, "step": 334 },
    { "epoch": 0.97, "learning_rate": 4.511278195488722e-05, "loss": 1.5312, "step": 335 },
    { "epoch": 0.97, "learning_rate": 4.5048335123523095e-05, "loss": 0.7524, "step": 336 },
    { "epoch": 0.98, "learning_rate": 4.498388829215897e-05, "loss": 1.25, "step": 337 },
    { "epoch": 0.98, "learning_rate": 4.491944146079484e-05, "loss": 0.9595, "step": 338 },
    { "epoch": 0.98, "learning_rate": 4.485499462943072e-05, "loss": 1.083, "step": 339 },
    { "epoch": 0.99, "learning_rate": 4.47905477980666e-05, "loss": 0.8052, "step": 340 },
    { "epoch": 0.99, "learning_rate": 4.472610096670247e-05, "loss": 0.9404, "step": 341 },
    { "epoch": 0.99, "learning_rate": 4.466165413533835e-05, "loss": 0.5723, "step": 342 },
    { "epoch": 0.99, "learning_rate": 4.4597207303974226e-05, "loss": 0.1422, "step": 343 },
    { "epoch": 1.0, "learning_rate": 4.4532760472610095e-05, "loss": 0.6899, "step": 344 },
    { "epoch": 1.0, "learning_rate": 4.446831364124597e-05, "loss": 0.5137, "step": 345 },
    { "epoch": 1.0, "learning_rate": 4.440386680988185e-05, "loss": 0.5693, "step": 346 },
    { "epoch": 1.01, "learning_rate": 4.4339419978517726e-05, "loss": 1.0957, "step": 347 },
    { "epoch": 1.01, "learning_rate": 4.42749731471536e-05, "loss": 0.7964, "step": 348 },
    { "epoch": 1.01, "learning_rate": 4.421052631578947e-05, "loss": 0.7773, "step": 349 },
    { "epoch": 1.01, "learning_rate": 4.414607948442535e-05, "loss": 0.5635, "step": 350 },
    { "epoch": 1.02, "learning_rate": 4.4081632653061226e-05, "loss": 0.2981, "step": 351 },
    { "epoch": 1.02, "learning_rate": 4.4017185821697096e-05, "loss": 0.2563, "step": 352 },
    { "epoch": 1.02, "learning_rate": 4.395273899033298e-05, "loss": 0.5273, "step": 353 },
    { "epoch": 1.03, "learning_rate": 4.3888292158968856e-05, "loss": 0.7793, "step": 354 },
    { "epoch": 1.03, "learning_rate": 4.3823845327604726e-05, "loss": 0.5918, "step": 355 },
    { "epoch": 1.03, "learning_rate": 4.37593984962406e-05, "loss": 0.1747, "step": 356 },
    { "epoch": 1.03, "learning_rate": 4.369495166487648e-05, "loss": 0.8096, "step": 357 },
    { "epoch": 1.04, "learning_rate": 4.363050483351235e-05, "loss": 0.7832, "step": 358 },
    { "epoch": 1.04, "learning_rate": 4.356605800214823e-05, "loss": 0.874, "step": 359 },
    { "epoch": 1.04, "learning_rate": 4.35016111707841e-05, "loss": 0.7764, "step": 360 },
    { "epoch": 1.05, "learning_rate": 4.343716433941998e-05, "loss": 0.6079, "step": 361 },
    { "epoch": 1.05, "learning_rate": 4.3372717508055856e-05, "loss": 0.7559, "step": 362 },
    { "epoch": 1.05, "learning_rate": 4.3308270676691726e-05, "loss": 0.3838, "step": 363 },
    { "epoch": 1.06, "learning_rate": 4.32438238453276e-05, "loss": 0.7886, "step": 364 },
    { "epoch": 1.06, "learning_rate": 4.3179377013963486e-05, "loss": 0.7612, "step": 365 },
    { "epoch": 1.06, "learning_rate": 4.3114930182599356e-05, "loss": 1.0811, "step": 366 },
    { "epoch": 1.06, "learning_rate": 4.305048335123523e-05, "loss": 1.0557, "step": 367 },
    { "epoch": 1.07, "learning_rate": 4.298603651987111e-05, "loss": 0.8599, "step": 368 },
    { "epoch": 1.07, "learning_rate": 4.292158968850698e-05, "loss": 0.6172, "step": 369 },
    { "epoch": 1.07, "learning_rate": 4.2857142857142856e-05, "loss": 0.8408, "step": 370 },
    { "epoch": 1.08, "learning_rate": 4.279269602577873e-05, "loss": 1.1182, "step": 371 },
    { "epoch": 1.08, "learning_rate": 4.272824919441461e-05, "loss": 0.8789, "step": 372 },
    { "epoch": 1.08, "learning_rate": 4.2663802363050486e-05, "loss": 0.543, "step": 373 },
    { "epoch": 1.08, "learning_rate": 4.2599355531686356e-05, "loss": 1.2139, "step": 374 },
    { "epoch": 1.09, "learning_rate": 4.253490870032223e-05, "loss": 0.5264, "step": 375 },
    { "epoch": 1.09, "learning_rate": 4.247046186895811e-05, "loss": 0.6206, "step": 376 },
    { "epoch": 1.09, "learning_rate": 4.2406015037593987e-05, "loss": 0.6782, "step": 377 },
    { "epoch": 1.1, "learning_rate": 4.234156820622986e-05, "loss": 0.6387, "step": 378 },
    { "epoch": 1.1, "learning_rate": 4.227712137486574e-05, "loss": 0.834, "step": 379 },
    { "epoch": 1.1, "learning_rate": 4.221267454350161e-05, "loss": 0.873, "step": 380 },
    { "epoch": 1.1, "learning_rate": 4.214822771213749e-05, "loss": 0.3003, "step": 381 },
    { "epoch": 1.11, "learning_rate": 4.208378088077336e-05, "loss": 0.6514, "step": 382 },
    { "epoch": 1.11, "learning_rate": 4.201933404940924e-05, "loss": 1.4746, "step": 383 },
    { "epoch": 1.11, "learning_rate": 4.195488721804512e-05, "loss": 0.5039, "step": 384 },
    { "epoch": 1.12, "learning_rate": 4.189044038668099e-05, "loss": 1.627, "step": 385 },
    { "epoch": 1.12, "learning_rate": 4.1825993555316863e-05, "loss": 0.8374, "step": 386 },
    { "epoch": 1.12, "learning_rate": 4.176154672395274e-05, "loss": 0.769, "step": 387 },
    { "epoch": 1.12, "learning_rate": 4.169709989258861e-05, "loss": 0.4521, "step": 388 },
    { "epoch": 1.13, "learning_rate": 4.1632653061224494e-05, "loss": 1.3594, "step": 389 },
    { "epoch": 1.13, "learning_rate": 4.156820622986037e-05, "loss": 0.6465, "step": 390 },
    { "epoch": 1.13, "learning_rate": 4.150375939849624e-05, "loss": 0.8965, "step": 391 },
    { "epoch": 1.14, "learning_rate": 4.143931256713212e-05, "loss": 0.8516, "step": 392 },
    { "epoch": 1.14, "learning_rate": 4.1374865735767994e-05, "loss": 0.5928, "step": 393 },
    { "epoch": 1.14, "learning_rate": 4.1310418904403864e-05, "loss": 0.6201, "step": 394 },
    { "epoch": 1.14, "learning_rate": 4.124597207303975e-05, "loss": 1.373, "step": 395 },
    { "epoch": 1.15, "learning_rate": 4.1181525241675624e-05, "loss": 0.3723, "step": 396 },
    { "epoch": 1.15, "learning_rate": 4.1117078410311494e-05, "loss": 0.3945, "step": 397 },
    { "epoch": 1.15, "learning_rate": 4.105263157894737e-05, "loss": 0.5054, "step": 398 },
    { "epoch": 1.16, "learning_rate": 4.098818474758324e-05, "loss": 1.4258, "step": 399 },
    { "epoch": 1.16, "learning_rate": 4.092373791621912e-05, "loss": 0.627, "step": 400 },
    { "epoch": 1.16, "learning_rate": 4.0859291084854994e-05, "loss": 0.687, "step": 401 },
    { "epoch": 1.17, "learning_rate": 4.079484425349087e-05, "loss": 1.1934, "step": 402 },
    { "epoch": 1.17, "learning_rate": 4.073039742212675e-05, "loss": 0.3296, "step": 403 },
    { "epoch": 1.17, "learning_rate": 4.0665950590762624e-05, "loss": 0.6445, "step": 404 },
    { "epoch": 1.17, "learning_rate": 4.0601503759398494e-05, "loss": 0.3169, "step": 405 },
    { "epoch": 1.18, "learning_rate": 4.053705692803437e-05, "loss": 0.7422, "step": 406 },
    { "epoch": 1.18, "learning_rate": 4.047261009667025e-05, "loss": 1.2725, "step": 407 },
    { "epoch": 1.18, "learning_rate": 4.0408163265306124e-05, "loss": 0.5264, "step": 408 },
    { "epoch": 1.19, "learning_rate": 4.0343716433942e-05, "loss": 0.9751, "step": 409 },
    { "epoch": 1.19, "learning_rate": 4.027926960257788e-05, "loss": 1.1699, "step": 410 },
    { "epoch": 1.19, "learning_rate": 4.021482277121375e-05, "loss": 0.417, "step": 411 },
    { "epoch": 1.19, "learning_rate": 4.0150375939849624e-05, "loss": 0.5767, "step": 412 },
    { "epoch": 1.2, "learning_rate": 4.0085929108485494e-05, "loss": 0.4243, "step": 413 },
    { "epoch": 1.2, "learning_rate": 4.002148227712138e-05, "loss": 1.4678, "step": 414 },
    { "epoch": 1.2, "learning_rate": 3.9957035445757254e-05, "loss": 1.1025, "step": 415 },
    { "epoch": 1.21, "learning_rate": 3.9892588614393124e-05, "loss": 1.2783, "step": 416 },
    { "epoch": 1.21, "learning_rate": 3.9828141783029e-05, "loss": 0.3447, "step": 417 },
    { "epoch": 1.21, "learning_rate": 3.976369495166488e-05, "loss": 0.4824, "step": 418 },
    { "epoch": 1.21, "learning_rate": 3.969924812030075e-05, "loss": 1.3311, "step": 419 },
    { "epoch": 1.22, "learning_rate": 3.963480128893663e-05, "loss": 0.5762, "step": 420 },
    { "epoch": 1.22, "learning_rate": 3.957035445757251e-05, "loss": 0.5205, "step": 421 },
    { "epoch": 1.22, "learning_rate": 3.950590762620838e-05, "loss": 0.4446, "step": 422 },
    { "epoch": 1.23, "learning_rate": 3.9441460794844255e-05, "loss": 0.2427, "step": 423 },
    { "epoch": 1.23, "learning_rate": 3.937701396348013e-05, "loss": 0.7974, "step": 424 },
    { "epoch": 1.23, "learning_rate": 3.9312567132116e-05, "loss": 0.3008, "step": 425 },
    { "epoch": 1.23, "learning_rate": 3.9248120300751885e-05, "loss": 1.2344, "step": 426 },
    { "epoch": 1.24, "learning_rate": 3.9183673469387755e-05, "loss": 0.6748, "step": 427 },
    { "epoch": 1.24, "learning_rate": 3.911922663802363e-05, "loss": 0.3877, "step": 428 },
    { "epoch": 1.24, "learning_rate": 3.905477980665951e-05, "loss": 1.0928, "step": 429 },
    { "epoch": 1.25, "learning_rate": 3.899033297529538e-05, "loss": 1.876, "step": 430 },
    { "epoch": 1.25, "learning_rate": 3.8925886143931255e-05, "loss": 0.9224, "step": 431 },
    { "epoch": 1.25, "learning_rate": 3.886143931256714e-05, "loss": 1.3135, "step": 432 },
    { "epoch": 1.26, "learning_rate": 3.879699248120301e-05, "loss": 0.8394, "step": 433 },
    { "epoch": 1.26, "learning_rate": 3.8732545649838885e-05, "loss": 0.8125, "step": 434 },
    { "epoch": 1.26, "learning_rate": 3.866809881847476e-05, "loss": 0.6909, "step": 435 },
    { "epoch": 1.26, "learning_rate": 3.860365198711063e-05, "loss": 1.6338, "step": 436 },
    { "epoch": 1.27, "learning_rate": 3.853920515574651e-05, "loss": 0.3625, "step": 437 },
    { "epoch": 1.27, "learning_rate": 3.847475832438239e-05, "loss": 1.2891, "step": 438 },
    { "epoch": 1.27, "learning_rate": 3.841031149301826e-05, "loss": 1.0586, "step": 439 },
    { "epoch": 1.28, "learning_rate": 3.834586466165414e-05, "loss": 0.6055, "step": 440 },
    { "epoch": 1.28, "learning_rate": 3.828141783029001e-05, "loss": 0.7695, "step": 441 },
    { "epoch": 1.28, "learning_rate": 3.8216970998925885e-05, "loss": 0.4434, "step": 442 },
    { "epoch": 1.28, "learning_rate": 3.815252416756176e-05, "loss": 0.8013, "step": 443 },
    { "epoch": 1.29, "learning_rate": 3.808807733619764e-05, "loss": 0.5269, "step": 444 },
    { "epoch": 1.29, "learning_rate": 3.8023630504833515e-05, "loss": 0.6255, "step": 445 },
    { "epoch": 1.29, "learning_rate": 3.795918367346939e-05, "loss": 1.5127, "step": 446 },
    { "epoch": 1.3, "learning_rate": 3.789473684210526e-05, "loss": 0.7939, "step": 447 },
    { "epoch": 1.3, "learning_rate": 3.783029001074114e-05, "loss": 0.6274, "step": 448 },
    { "epoch": 1.3, "learning_rate": 3.7765843179377015e-05, "loss": 0.8921, "step": 449 },
    { "epoch": 1.3, "learning_rate": 3.7701396348012885e-05, "loss": 0.7173, "step": 450 },
    { "epoch": 1.31, "learning_rate": 3.763694951664877e-05, "loss": 0.8696, "step": 451 },
    { "epoch": 1.31, "learning_rate": 3.7572502685284646e-05, "loss": 0.3567, "step": 452 },
    { "epoch": 1.31, "learning_rate": 3.7508055853920516e-05, "loss": 0.2025, "step": 453 },
    { "epoch": 1.32, "learning_rate": 3.744360902255639e-05, "loss": 0.1935, "step": 454 },
    { "epoch": 1.32, "learning_rate": 3.737916219119226e-05, "loss": 0.9375, "step": 455 },
    { "epoch": 1.32, "learning_rate": 3.731471535982814e-05, "loss": 0.7847, "step": 456 },
    { "epoch": 1.32, "learning_rate": 3.725026852846402e-05, "loss": 0.2959, "step": 457 },
    { "epoch": 1.33, "learning_rate": 3.718582169709989e-05, "loss": 0.873, "step": 458 },
    { "epoch": 1.33, "learning_rate": 3.712137486573577e-05, "loss": 0.3376, "step": 459 },
    { "epoch": 1.33, "learning_rate": 3.7056928034371646e-05, "loss": 0.2786, "step": 460 },
    { "epoch": 1.34, "learning_rate": 3.6992481203007516e-05, "loss": 0.5781, "step": 461 },
    { "epoch": 1.34, "learning_rate": 3.692803437164339e-05, "loss": 1.5342, "step": 462 },
    { "epoch": 1.34, "learning_rate": 3.6863587540279276e-05, "loss": 1.1494, "step": 463 },
    { "epoch": 1.34, "learning_rate": 3.6799140708915146e-05, "loss": 0.4146, "step": 464 },
    { "epoch": 1.35, "learning_rate": 3.673469387755102e-05, "loss": 0.6514, "step": 465 },
    { "epoch": 1.35, "learning_rate": 3.66702470461869e-05, "loss": 0.5601, "step": 466 },
    { "epoch": 1.35, "learning_rate": 3.660580021482277e-05, "loss": 0.9951, "step": 467 },
    { "epoch": 1.36, "learning_rate": 3.6541353383458646e-05, "loss": 0.3831, "step": 468 },
    { "epoch": 1.36, "learning_rate": 3.647690655209452e-05, "loss": 0.6987, "step": 469 },
    { "epoch": 1.36, "learning_rate": 3.64124597207304e-05, "loss": 0.6621, "step": 470 },
    { "epoch": 1.37, "learning_rate": 3.6348012889366276e-05, "loss": 0.7173, "step": 471 },
    { "epoch": 1.37, "learning_rate": 3.6283566058002146e-05, "loss": 1.0811, "step": 472 },
    { "epoch": 1.37, "learning_rate": 3.621911922663802e-05, "loss": 1.2783, "step": 473 },
    { "epoch": 1.37, "learning_rate": 3.61546723952739e-05, "loss": 1.5293, "step": 474 },
    { "epoch": 1.38, "learning_rate": 3.6090225563909776e-05, "loss": 1.3809, "step": 475 },
    { "epoch": 1.38, "learning_rate": 3.602577873254565e-05, "loss": 1.0098, "step": 476 },
    { "epoch": 1.38, "learning_rate": 3.596133190118153e-05, "loss": 0.6865, "step": 477 },
    { "epoch": 1.39, "learning_rate": 3.58968850698174e-05, "loss": 2.1152, "step": 478 },
    { "epoch": 1.39, "learning_rate": 3.5832438238453276e-05, "loss": 0.7798, "step": 479 },
    { "epoch": 1.39, "learning_rate": 3.576799140708915e-05, "loss": 0.9668, "step": 480 },
    { "epoch": 1.39, "learning_rate": 3.570354457572503e-05, "loss": 0.6685, "step": 481 },
    { "epoch": 1.4, "learning_rate": 3.5639097744360906e-05, "loss": 0.228, "step": 482 },
    { "epoch": 1.4, "learning_rate": 3.5574650912996776e-05, "loss": 1.3633, "step": 483 },
    { "epoch": 1.4, "learning_rate": 3.551020408163265e-05, "loss": 0.8384, "step": 484 },
    { "epoch": 1.41, "learning_rate": 3.544575725026853e-05, "loss": 1.4463, "step": 485 },
    { "epoch": 1.41, "learning_rate": 3.53813104189044e-05, "loss": 0.9956, "step": 486 },
    { "epoch": 1.41, "learning_rate": 3.531686358754028e-05, "loss": 0.9316, "step": 487 },
    { "epoch": 1.41, "learning_rate": 3.525241675617616e-05, "loss": 0.9541, "step": 488 },
    { "epoch": 1.42, "learning_rate": 3.518796992481203e-05, "loss": 0.1255, "step": 489 },
    { "epoch": 1.42, "learning_rate": 3.512352309344791e-05, "loss": 0.7798, "step": 490 },
    { "epoch": 1.42, "learning_rate": 3.505907626208378e-05, "loss": 0.1796, "step": 491 },
    { "epoch": 1.43, "learning_rate": 3.499462943071965e-05, "loss": 0.251, "step": 492 },
    { "epoch": 1.43, "learning_rate": 3.493018259935554e-05, "loss": 0.9492, "step": 493 },
    { "epoch": 1.43, "learning_rate": 3.4865735767991414e-05, "loss": 0.6108, "step": 494 },
    { "epoch": 1.43, "learning_rate": 3.4801288936627283e-05, "loss": 0.5967, "step": 495 },
    { "epoch": 1.44, "learning_rate": 3.473684210526316e-05, "loss": 0.8066, "step": 496 },
    { "epoch": 1.44, "learning_rate": 3.467239527389903e-05, "loss": 1.2207, "step": 497 },
    { "epoch": 1.44, "learning_rate": 3.460794844253491e-05, "loss": 0.8301, "step": 498 },
    { "epoch": 1.45, "learning_rate": 3.4543501611170784e-05, "loss": 1.124, "step": 499 },
    { "epoch": 1.45, "learning_rate": 3.447905477980666e-05, "loss": 0.5278, "step": 500 },
    { "epoch": 1.45, "learning_rate": 3.441460794844254e-05, "loss": 0.174, "step": 501 },
    { "epoch": 1.46, "learning_rate": 3.4350161117078414e-05, "loss": 0.5508, "step": 502 },
    { "epoch": 1.46, "learning_rate": 3.4285714285714284e-05, "loss": 0.9468, "step": 503 },
    { "epoch": 1.46, "learning_rate": 3.422126745435016e-05, "loss": 1.4697, "step": 504 },
    { "epoch": 1.46, "learning_rate": 3.415682062298604e-05, "loss": 1.375, "step": 505 },
    { "epoch": 1.47, "learning_rate": 3.4092373791621914e-05, "loss": 1.6553, "step": 506 },
    { "epoch": 1.47, "learning_rate": 3.402792696025779e-05, "loss": 0.9907, "step": 507 },
    { "epoch": 1.47, "learning_rate": 3.396348012889366e-05, "loss": 0.5063, "step": 508 },
    { "epoch": 1.48, "learning_rate": 3.389903329752954e-05, "loss": 1.9912, "step": 509 },
    { "epoch": 1.48, "learning_rate": 3.3834586466165414e-05, "loss": 0.4668, "step": 510 },
    { "epoch": 1.48, "learning_rate": 3.3770139634801284e-05, "loss": 0.6558, "step": 511 },
    { "epoch": 1.48, "learning_rate": 3.370569280343717e-05, "loss": 0.8813, "step": 512 },
    { "epoch": 1.49, "learning_rate": 3.3641245972073044e-05, "loss": 1.0352, "step": 513 },
    { "epoch": 1.49, "learning_rate": 3.3576799140708914e-05, "loss": 0.8276, "step": 514 },
    { "epoch": 1.49, "learning_rate": 3.351235230934479e-05, "loss": 1.2236, "step": 515 },
    { "epoch": 1.5, "learning_rate": 3.344790547798067e-05, "loss": 0.9565, "step": 516 },
    { "epoch": 1.5, "learning_rate": 3.338345864661654e-05, "loss": 0.9995, "step": 517 },
    { "epoch": 1.5, "learning_rate": 3.331901181525242e-05, "loss": 0.5166, "step": 518 },
    { "epoch": 1.5, "learning_rate": 3.32545649838883e-05, "loss": 0.5742, "step": 519 },
    { "epoch": 1.51, "learning_rate": 3.319011815252417e-05, "loss": 0.8345, "step": 520 },
    { "epoch": 1.51, "learning_rate": 3.3125671321160044e-05, "loss": 1.2949, "step": 521 },
    { "epoch": 1.51, "learning_rate": 3.3061224489795914e-05, "loss": 1.1328, "step": 522 },
    { "epoch": 1.52, "learning_rate": 3.299677765843179e-05, "loss": 0.7051, "step": 523 },
    { "epoch": 1.52, "learning_rate": 3.2932330827067674e-05, "loss": 0.8169, "step": 524 },
    { "epoch": 1.52, "learning_rate": 3.2867883995703544e-05, "loss": 1.0195, "step": 525 },
    { "epoch": 1.52, "learning_rate": 3.280343716433942e-05, "loss": 0.8081, "step": 526 },
    { "epoch": 1.53, "learning_rate": 3.27389903329753e-05, "loss": 0.7212, "step": 527 },
    { "epoch": 1.53, "learning_rate": 3.267454350161117e-05, "loss": 0.7681, "step": 528 },
    { "epoch": 1.53, "learning_rate": 3.2610096670247044e-05, "loss": 0.7163, "step": 529 },
    { "epoch": 1.54, "learning_rate": 3.254564983888293e-05, "loss": 0.9805, "step": 530 },
    { "epoch": 1.54, "learning_rate": 3.24812030075188e-05, "loss": 0.627, "step": 531 },
    { "epoch": 1.54, "learning_rate": 3.2416756176154675e-05, "loss": 1.0391, "step": 532 },
    { "epoch": 1.54, "learning_rate": 3.235230934479055e-05, "loss": 1.0283, "step": 533 },
    { "epoch": 1.55, "learning_rate": 3.228786251342642e-05, "loss": 2.1133, "step": 534 },
    { "epoch": 1.55, "learning_rate": 3.22234156820623e-05, "loss": 1.041, "step": 535 },
    { "epoch": 1.55, "learning_rate": 3.2158968850698175e-05, "loss": 0.7285, "step": 536 },
    { "epoch": 1.56, "learning_rate": 3.209452201933405e-05, "loss": 0.4043, "step": 537 },
    { "epoch": 1.56, "learning_rate": 3.203007518796993e-05, "loss": 0.5142, "step": 538 },
    { "epoch": 1.56, "learning_rate": 3.19656283566058e-05, "loss": 1.5195, "step": 539 },
    { "epoch": 1.57, "learning_rate": 3.1901181525241675e-05,
|
"loss": 0.2092, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.183673469387755e-05, |
|
"loss": 0.5186, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.177228786251343e-05, |
|
"loss": 0.8418, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 3.1707841031149305e-05, |
|
"loss": 0.5356, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.164339419978518e-05, |
|
"loss": 0.647, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.157894736842105e-05, |
|
"loss": 0.6558, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 3.151450053705693e-05, |
|
"loss": 0.8794, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.1450053705692805e-05, |
|
"loss": 0.7026, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.1385606874328675e-05, |
|
"loss": 1.7617, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.132116004296456e-05, |
|
"loss": 0.7349, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 3.125671321160043e-05, |
|
"loss": 0.3079, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.1192266380236305e-05, |
|
"loss": 0.4131, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.112781954887218e-05, |
|
"loss": 0.4033, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 3.106337271750805e-05, |
|
"loss": 0.665, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.099892588614393e-05, |
|
"loss": 0.6807, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.093447905477981e-05, |
|
"loss": 0.3914, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.087003222341568e-05, |
|
"loss": 0.2849, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 3.080558539205156e-05, |
|
"loss": 0.3276, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.0741138560687435e-05, |
|
"loss": 0.4121, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.0676691729323305e-05, |
|
"loss": 0.2688, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 3.061224489795918e-05, |
|
"loss": 0.1829, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.0547798066595066e-05, |
|
"loss": 0.5039, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.0483351235230936e-05, |
|
"loss": 0.4885, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.0418904403866812e-05, |
|
"loss": 0.7773, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 3.0354457572502682e-05, |
|
"loss": 0.4307, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.0290010741138562e-05, |
|
"loss": 0.8657, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.022556390977444e-05, |
|
"loss": 0.4141, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 3.016111707841031e-05, |
|
"loss": 0.2261, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.009667024704619e-05, |
|
"loss": 1.0059, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 3.0032223415682066e-05, |
|
"loss": 0.6479, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.996777658431794e-05, |
|
"loss": 1.0986, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.9903329752953812e-05, |
|
"loss": 0.4487, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.983888292158969e-05, |
|
"loss": 0.7275, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.9774436090225562e-05, |
|
"loss": 0.4023, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.970998925886144e-05, |
|
"loss": 0.217, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.9645542427497316e-05, |
|
"loss": 0.647, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.958109559613319e-05, |
|
"loss": 2.2598, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.9516648764769066e-05, |
|
"loss": 0.7632, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.9452201933404943e-05, |
|
"loss": 0.2375, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.9387755102040816e-05, |
|
"loss": 1.0186, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.932330827067669e-05, |
|
"loss": 0.3733, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.925886143931257e-05, |
|
"loss": 0.5693, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.9194414607948443e-05, |
|
"loss": 0.791, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.9129967776584316e-05, |
|
"loss": 1.4707, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.9065520945220196e-05, |
|
"loss": 0.6255, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.900107411385607e-05, |
|
"loss": 0.7905, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.8936627282491943e-05, |
|
"loss": 0.1816, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.887218045112782e-05, |
|
"loss": 0.5864, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 2.8807733619763696e-05, |
|
"loss": 0.5752, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.874328678839957e-05, |
|
"loss": 1.4062, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.8678839957035446e-05, |
|
"loss": 0.3428, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 2.8614393125671323e-05, |
|
"loss": 0.1882, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8549946294307196e-05, |
|
"loss": 0.2407, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8485499462943073e-05, |
|
"loss": 0.519, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8421052631578946e-05, |
|
"loss": 0.5439, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 2.8356605800214823e-05, |
|
"loss": 0.8413, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.82921589688507e-05, |
|
"loss": 0.627, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.8227712137486573e-05, |
|
"loss": 0.2878, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 2.816326530612245e-05, |
|
"loss": 1.4326, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.8098818474758327e-05, |
|
"loss": 0.7769, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.80343716433942e-05, |
|
"loss": 1.1152, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.7969924812030073e-05, |
|
"loss": 0.5122, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 2.7905477980665953e-05, |
|
"loss": 0.4219, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.7841031149301827e-05, |
|
"loss": 0.3728, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.77765843179377e-05, |
|
"loss": 0.9292, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 2.771213748657358e-05, |
|
"loss": 0.439, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.7647690655209454e-05, |
|
"loss": 0.4609, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.7583243823845327e-05, |
|
"loss": 0.5693, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 2.7518796992481204e-05, |
|
"loss": 0.6675, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.745435016111708e-05, |
|
"loss": 1.1182, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.7389903329752954e-05, |
|
"loss": 0.5806, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.732545649838883e-05, |
|
"loss": 1.4824, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 2.7261009667024707e-05, |
|
"loss": 0.8848, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.719656283566058e-05, |
|
"loss": 0.6616, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.7132116004296457e-05, |
|
"loss": 1.1816, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 2.706766917293233e-05, |
|
"loss": 0.7158, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.7003222341568207e-05, |
|
"loss": 1.0264, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.6938775510204084e-05, |
|
"loss": 1.3906, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.6874328678839957e-05, |
|
"loss": 0.8315, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 2.6809881847475834e-05, |
|
"loss": 0.325, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.6745435016111707e-05, |
|
"loss": 0.6333, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.6680988184747584e-05, |
|
"loss": 0.189, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 2.6616541353383457e-05, |
|
"loss": 0.5181, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.6552094522019334e-05, |
|
"loss": 0.7617, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.648764769065521e-05, |
|
"loss": 0.6064, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.6423200859291084e-05, |
|
"loss": 0.4485, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 2.635875402792696e-05, |
|
"loss": 1.4287, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.6294307196562838e-05, |
|
"loss": 0.5005, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.622986036519871e-05, |
|
"loss": 1.1748, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 2.6165413533834584e-05, |
|
"loss": 0.6528, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.6100966702470464e-05, |
|
"loss": 0.8218, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.6036519871106338e-05, |
|
"loss": 0.667, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.597207303974221e-05, |
|
"loss": 0.7935, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 2.590762620837809e-05, |
|
"loss": 0.564, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.5843179377013964e-05, |
|
"loss": 0.7573, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.5778732545649838e-05, |
|
"loss": 0.6895, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 2.5714285714285714e-05, |
|
"loss": 1.2871, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.564983888292159e-05, |
|
"loss": 0.2174, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.5585392051557465e-05, |
|
"loss": 0.4556, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 2.552094522019334e-05, |
|
"loss": 0.2581, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.5456498388829218e-05, |
|
"loss": 0.2334, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.539205155746509e-05, |
|
"loss": 0.9619, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.5327604726100968e-05, |
|
"loss": 0.8389, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 2.526315789473684e-05, |
|
"loss": 0.4641, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.5198711063372718e-05, |
|
"loss": 0.6216, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.5134264232008595e-05, |
|
"loss": 0.8599, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 2.5069817400644468e-05, |
|
"loss": 1.0166, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.5005370569280345e-05, |
|
"loss": 0.5615, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.494092373791622e-05, |
|
"loss": 0.2927, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.4876476906552095e-05, |
|
"loss": 0.2435, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 2.4812030075187968e-05, |
|
"loss": 1.791, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.4747583243823848e-05, |
|
"loss": 0.7222, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.468313641245972e-05, |
|
"loss": 0.5127, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 2.4618689581095595e-05, |
|
"loss": 1.2412, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.4554242749731475e-05, |
|
"loss": 0.5151, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.448979591836735e-05, |
|
"loss": 0.6201, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.4425349087003222e-05, |
|
"loss": 0.3262, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 2.43609022556391e-05, |
|
"loss": 0.9502, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.4296455424274975e-05, |
|
"loss": 0.8535, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.423200859291085e-05, |
|
"loss": 1.6621, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 2.4167561761546725e-05, |
|
"loss": 1.0186, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.4103114930182602e-05, |
|
"loss": 1.1162, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.4038668098818475e-05, |
|
"loss": 0.4033, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.3974221267454352e-05, |
|
"loss": 1.3906, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 2.3909774436090225e-05, |
|
"loss": 0.7471, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.3845327604726102e-05, |
|
"loss": 1.1973, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.378088077336198e-05, |
|
"loss": 0.5869, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 2.3716433941997852e-05, |
|
"loss": 0.9888, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.365198711063373e-05, |
|
"loss": 1.0742, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.3587540279269602e-05, |
|
"loss": 0.2125, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.352309344790548e-05, |
|
"loss": 1.0732, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 2.3458646616541352e-05, |
|
"loss": 0.4409, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.339419978517723e-05, |
|
"loss": 0.4382, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.3329752953813106e-05, |
|
"loss": 0.4102, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 2.326530612244898e-05, |
|
"loss": 0.6001, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.3200859291084856e-05, |
|
"loss": 0.2225, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.3136412459720732e-05, |
|
"loss": 0.4841, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 2.3071965628356606e-05, |
|
"loss": 0.3896, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.300751879699248e-05, |
|
"loss": 0.8467, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.294307196562836e-05, |
|
"loss": 0.6938, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.2878625134264232e-05, |
|
"loss": 0.6113, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 2.2814178302900106e-05, |
|
"loss": 0.6523, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.2749731471535986e-05, |
|
"loss": 1.002, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.268528464017186e-05, |
|
"loss": 0.1422, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 2.2620837808807733e-05, |
|
"loss": 0.4534, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.255639097744361e-05, |
|
"loss": 0.2925, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.2491944146079486e-05, |
|
"loss": 0.5317, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.242749731471536e-05, |
|
"loss": 0.8975, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 2.2363050483351236e-05, |
|
"loss": 1.1289, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.2298603651987113e-05, |
|
"loss": 0.7373, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.2234156820622986e-05, |
|
"loss": 0.2634, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 2.2169709989258863e-05, |
|
"loss": 0.1131, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.2105263157894736e-05, |
|
"loss": 0.8384, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.2040816326530613e-05, |
|
"loss": 0.2357, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.197636949516649e-05, |
|
"loss": 0.4929, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 2.1911922663802363e-05, |
|
"loss": 1.3955, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.184747583243824e-05, |
|
"loss": 0.3135, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.1783029001074116e-05, |
|
"loss": 0.4622, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 2.171858216970999e-05, |
|
"loss": 0.5776, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.1654135338345863e-05, |
|
"loss": 1.3086, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.1589688506981743e-05, |
|
"loss": 0.0809, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.1525241675617616e-05, |
|
"loss": 0.192, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 2.146079484425349e-05, |
|
"loss": 0.6079, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.1396348012889367e-05, |
|
"loss": 1.252, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.1331901181525243e-05, |
|
"loss": 0.8066, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 2.1267454350161117e-05, |
|
"loss": 0.427, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.1203007518796993e-05, |
|
"loss": 0.3782, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.113856068743287e-05, |
|
"loss": 0.5156, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 2.1074113856068743e-05, |
|
"loss": 0.2861, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.100966702470462e-05, |
|
"loss": 0.7266, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.0945220193340493e-05, |
|
"loss": 0.7905, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.088077336197637e-05, |
|
"loss": 0.5312, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 2.0816326530612247e-05, |
|
"loss": 0.3154, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.075187969924812e-05, |
|
"loss": 0.2529, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.0687432867883997e-05, |
|
"loss": 0.5879, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 2.0622986036519874e-05, |
|
"loss": 1.0176, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.0558539205155747e-05, |
|
"loss": 0.6187, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.049409237379162e-05, |
|
"loss": 1.1318, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.0429645542427497e-05, |
|
"loss": 0.689, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 2.0365198711063374e-05, |
|
"loss": 1.5176, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.0300751879699247e-05, |
|
"loss": 0.1791, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.0236305048335124e-05, |
|
"loss": 0.6118, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 2.0171858216971e-05, |
|
"loss": 0.7197, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.0107411385606874e-05, |
|
"loss": 0.2455, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 2.0042964554242747e-05, |
|
"loss": 0.6484, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.9978517722878627e-05, |
|
"loss": 0.4783, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.99140708915145e-05, |
|
"loss": 0.3025, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.9849624060150374e-05, |
|
"loss": 0.5659, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.9785177228786254e-05, |
|
"loss": 0.5, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.9720730397422127e-05, |
|
"loss": 1.1221, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.9656283566058e-05, |
|
"loss": 1.127, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.9591836734693877e-05, |
|
"loss": 0.8398, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.9527389903329754e-05, |
|
"loss": 0.6069, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 1.9462943071965627e-05, |
|
"loss": 0.6099, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.9398496240601504e-05, |
|
"loss": 0.3914, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.933404940923738e-05, |
|
"loss": 0.478, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 1.9269602577873254e-05, |
|
"loss": 0.4048, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.920515574650913e-05, |
|
"loss": 0.4072, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.9140708915145004e-05, |
|
"loss": 0.2915, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.907626208378088e-05, |
|
"loss": 0.2668, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 1.9011815252416758e-05, |
|
"loss": 0.2759, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.894736842105263e-05, |
|
"loss": 0.7534, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.8882921589688508e-05, |
|
"loss": 0.2269, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 1.8818474758324384e-05, |
|
"loss": 0.5571, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.8754027926960258e-05, |
|
"loss": 0.1215, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.868958109559613e-05, |
|
"loss": 0.749, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 1.862513426423201e-05, |
|
"loss": 0.4685, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.8560687432867885e-05, |
|
"loss": 0.6719, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.8496240601503758e-05, |
|
"loss": 0.3398, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.8431793770139638e-05, |
|
"loss": 1.0566, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 1.836734693877551e-05, |
|
"loss": 0.3513, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.8302900107411385e-05, |
|
"loss": 0.7632, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.823845327604726e-05, |
|
"loss": 0.1722, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 1.8174006444683138e-05, |
|
"loss": 1.0508, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.810955961331901e-05, |
|
"loss": 0.394, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.8045112781954888e-05, |
|
"loss": 1.0664, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.7980665950590765e-05, |
|
"loss": 0.793, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 1.7916219119226638e-05, |
|
"loss": 0.1925, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.7851772287862515e-05, |
|
"loss": 0.2903, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.7787325456498388e-05, |
|
"loss": 0.1981, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 1.7722878625134265e-05, |
|
"loss": 0.7808, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.765843179377014e-05, |
|
"loss": 1.5342, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.7593984962406015e-05, |
|
"loss": 0.2656, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.752953813104189e-05, |
|
"loss": 0.408, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 1.746509129967777e-05, |
|
"loss": 0.4575, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.7400644468313642e-05, |
|
"loss": 0.5376, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.7336197636949515e-05, |
|
"loss": 0.6138, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 1.7271750805585392e-05, |
|
"loss": 0.6538, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.720730397422127e-05, |
|
"loss": 0.3455, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.7142857142857142e-05, |
|
"loss": 2.2188, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.707841031149302e-05, |
|
"loss": 0.8359, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 1.7013963480128895e-05, |
|
"loss": 0.4607, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.694951664876477e-05, |
|
"loss": 0.5005, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.6885069817400642e-05, |
|
"loss": 0.8154, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 1.6820622986036522e-05, |
|
"loss": 0.561, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.6756176154672395e-05, |
|
"loss": 0.7104, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.669172932330827e-05, |
|
"loss": 0.1943, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 1.662728249194415e-05, |
|
"loss": 1.1729, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.6562835660580022e-05, |
|
"loss": 0.3677, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.6498388829215895e-05, |
|
"loss": 0.2639, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.6433941997851772e-05, |
|
"loss": 0.6929, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 1.636949516648765e-05, |
|
"loss": 0.243, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.6305048335123522e-05, |
|
"loss": 0.7842, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.62406015037594e-05, |
|
"loss": 1.2842, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 1.6176154672395276e-05, |
|
"loss": 0.6558, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.611170784103115e-05, |
|
"loss": 0.647, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.6047261009667026e-05, |
|
"loss": 0.3364, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.59828141783029e-05, |
|
"loss": 0.45, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 1.5918367346938776e-05, |
|
"loss": 1.127, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.5853920515574652e-05, |
|
"loss": 0.666, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.5789473684210526e-05, |
|
"loss": 0.0742, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 1.5725026852846403e-05, |
|
"loss": 0.897, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.566058002148228e-05, |
|
"loss": 0.0906, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.5596133190118153e-05, |
|
"loss": 0.79, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.5531686358754026e-05, |
|
"loss": 0.5742, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 1.5467239527389906e-05, |
|
"loss": 0.894, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.540279269602578e-05, |
|
"loss": 0.5088, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.5338345864661653e-05, |
|
"loss": 0.2896, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 1.5273899033297533e-05, |
|
"loss": 0.385, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.5209452201933406e-05, |
|
"loss": 0.7002, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.5145005370569281e-05, |
|
"loss": 0.2917, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.5080558539205154e-05, |
|
"loss": 0.3679, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 1.5016111707841033e-05, |
|
"loss": 0.2607, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.4951664876476906e-05, |
|
"loss": 0.5879, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.4887218045112781e-05, |
|
"loss": 0.6631, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 1.4822771213748658e-05, |
|
"loss": 0.5054, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.4758324382384533e-05, |
|
"loss": 0.6641, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.4693877551020408e-05, |
|
"loss": 0.9521, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.4629430719656285e-05, |
|
"loss": 1.9043, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 1.4564983888292158e-05, |
|
"loss": 1.0449, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.4500537056928035e-05, |
|
"loss": 0.5547, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.443609022556391e-05, |
|
"loss": 1.0, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 1.4371643394199785e-05, |
|
"loss": 0.5303, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.4307196562835662e-05, |
|
"loss": 0.4465, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.4242749731471537e-05, |
|
"loss": 0.6113, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 1.4178302900107412e-05, |
|
"loss": 0.1066, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.4113856068743287e-05, |
|
"loss": 0.3657, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.4049409237379163e-05, |
|
"loss": 0.8228, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.3984962406015037e-05, |
|
"loss": 1.3389, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 1.3920515574650913e-05, |
|
"loss": 0.311, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.385606874328679e-05, |
|
"loss": 0.4507, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.3791621911922663e-05, |
|
"loss": 0.1243, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 1.372717508055854e-05, |
|
"loss": 1.3535, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.3662728249194415e-05, |
|
"loss": 0.3181, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.359828141783029e-05, |
|
"loss": 0.5601, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.3533834586466165e-05, |
|
"loss": 0.4221, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 1.3469387755102042e-05, |
|
"loss": 0.5015, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.3404940923737917e-05, |
|
"loss": 0.4658, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.3340494092373792e-05, |
|
"loss": 0.6699, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 1.3276047261009667e-05, |
|
"loss": 0.647, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.3211600429645542e-05, |
|
"loss": 0.4644, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.3147153598281419e-05, |
|
"loss": 0.2188, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.3082706766917292e-05, |
|
"loss": 0.3911, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.3018259935553169e-05, |
|
"loss": 0.4988, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.2953813104189046e-05, |
|
"loss": 0.2781, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.2889366272824919e-05, |
|
"loss": 0.8247, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.2824919441460796e-05, |
|
"loss": 1.2646, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.276047261009667e-05, |
|
"loss": 1.1211, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.2696025778732546e-05, |
|
"loss": 1.5156, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 0.2452, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.2567132116004297e-05, |
|
"loss": 0.5449, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.2502685284640172e-05, |
|
"loss": 0.5576, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.2438238453276047e-05, |
|
"loss": 0.6338, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.2373791621911924e-05, |
|
"loss": 0.3057, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.2309344790547797e-05, |
|
"loss": 0.3623, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.2244897959183674e-05, |
|
"loss": 0.7378, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.218045112781955e-05, |
|
"loss": 0.4917, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.2116004296455424e-05, |
|
"loss": 0.8989, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.2051557465091301e-05, |
|
"loss": 0.4995, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.1987110633727176e-05, |
|
"loss": 0.2791, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.1922663802363051e-05, |
|
"loss": 0.6958, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.1858216970998926e-05, |
|
"loss": 1.3311, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.1793770139634801e-05, |
|
"loss": 1.2793, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.1729323308270676e-05, |
|
"loss": 1.166, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.1664876476906553e-05, |
|
"loss": 0.8516, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.1600429645542428e-05, |
|
"loss": 0.6934, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.1535982814178303e-05, |
|
"loss": 0.3757, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.147153598281418e-05, |
|
"loss": 0.2754, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.1407089151450053e-05, |
|
"loss": 0.9756, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.134264232008593e-05, |
|
"loss": 0.3345, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.1278195488721805e-05, |
|
"loss": 0.7622, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.121374865735768e-05, |
|
"loss": 0.212, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.1149301825993556e-05, |
|
"loss": 0.4236, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.1084854994629431e-05, |
|
"loss": 1.582, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.1020408163265306e-05, |
|
"loss": 1.0332, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0955961331901181e-05, |
|
"loss": 0.9839, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0891514500537058e-05, |
|
"loss": 0.8301, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.0827067669172932e-05, |
|
"loss": 2.4746, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0762620837808808e-05, |
|
"loss": 0.9419, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0698174006444683e-05, |
|
"loss": 0.5151, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0633727175080558e-05, |
|
"loss": 0.584, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.0569280343716435e-05, |
|
"loss": 0.7236, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.050483351235231e-05, |
|
"loss": 0.2401, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.0440386680988185e-05, |
|
"loss": 1.1973, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.037593984962406e-05, |
|
"loss": 0.5747, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.0311493018259937e-05, |
|
"loss": 0.6992, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.024704618689581e-05, |
|
"loss": 0.4014, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.0182599355531687e-05, |
|
"loss": 0.8003, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.0118152524167562e-05, |
|
"loss": 0.3843, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.0053705692803437e-05, |
|
"loss": 0.8589, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.989258861439314e-06, |
|
"loss": 0.6914, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 9.924812030075187e-06, |
|
"loss": 0.2391, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 9.860365198711064e-06, |
|
"loss": 0.9551, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 9.795918367346939e-06, |
|
"loss": 0.7603, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 9.731471535982814e-06, |
|
"loss": 0.4573, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.66702470461869e-06, |
|
"loss": 0.7515, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.602577873254565e-06, |
|
"loss": 0.3679, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.53813104189044e-06, |
|
"loss": 1.2441, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 0.8926, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.409237379162192e-06, |
|
"loss": 0.9185, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.344790547798066e-06, |
|
"loss": 0.2751, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.280343716433942e-06, |
|
"loss": 0.9556, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.215896885069819e-06, |
|
"loss": 0.2913, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.151450053705692e-06, |
|
"loss": 0.5596, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.087003222341569e-06, |
|
"loss": 1.1748, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.022556390977444e-06, |
|
"loss": 0.3442, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.958109559613319e-06, |
|
"loss": 0.6567, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.893662728249194e-06, |
|
"loss": 1.2559, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.82921589688507e-06, |
|
"loss": 0.3486, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.764769065520946e-06, |
|
"loss": 0.7559, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.700322234156821e-06, |
|
"loss": 0.4709, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.635875402792696e-06, |
|
"loss": 0.8193, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.571428571428571e-06, |
|
"loss": 0.7148, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.506981740064448e-06, |
|
"loss": 0.5742, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.442534908700321e-06, |
|
"loss": 0.7822, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.378088077336198e-06, |
|
"loss": 0.7886, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.313641245972074e-06, |
|
"loss": 0.3855, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.249194414607948e-06, |
|
"loss": 0.9004, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.184747583243824e-06, |
|
"loss": 0.6431, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.1203007518797e-06, |
|
"loss": 1.0674, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 8.055853920515575e-06, |
|
"loss": 1.1387, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.99140708915145e-06, |
|
"loss": 0.5269, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.926960257787326e-06, |
|
"loss": 0.2634, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.862513426423201e-06, |
|
"loss": 0.3638, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.798066595059076e-06, |
|
"loss": 0.2739, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.733619763694953e-06, |
|
"loss": 0.448, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.669172932330826e-06, |
|
"loss": 1.1904, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.604726100966703e-06, |
|
"loss": 0.4287, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.540279269602577e-06, |
|
"loss": 0.9272, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.475832438238453e-06, |
|
"loss": 1.0234, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.411385606874329e-06, |
|
"loss": 0.6655, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.346938775510204e-06, |
|
"loss": 0.4514, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 7.282491944146079e-06, |
|
"loss": 0.2781, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.218045112781955e-06, |
|
"loss": 0.6792, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.153598281417831e-06, |
|
"loss": 0.8042, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.089151450053706e-06, |
|
"loss": 0.1783, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 7.024704618689582e-06, |
|
"loss": 0.3884, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.960257787325457e-06, |
|
"loss": 0.2115, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.895810955961332e-06, |
|
"loss": 0.3872, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 6.831364124597208e-06, |
|
"loss": 0.9307, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.766917293233083e-06, |
|
"loss": 0.7832, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.7024704618689585e-06, |
|
"loss": 0.3894, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.6380236305048335e-06, |
|
"loss": 0.2898, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 6.573576799140709e-06, |
|
"loss": 0.0709, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.509129967776584e-06, |
|
"loss": 0.165, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.4446831364124594e-06, |
|
"loss": 1.2168, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 6.380236305048335e-06, |
|
"loss": 0.2332, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 0.5137, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.251342642320086e-06, |
|
"loss": 0.7983, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.186895810955962e-06, |
|
"loss": 1.165, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 6.122448979591837e-06, |
|
"loss": 0.4495, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 6.058002148227712e-06, |
|
"loss": 1.4053, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.993555316863588e-06, |
|
"loss": 0.4312, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 5.929108485499463e-06, |
|
"loss": 0.5498, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.864661654135338e-06, |
|
"loss": 0.1597, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.800214822771214e-06, |
|
"loss": 0.2886, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.73576799140709e-06, |
|
"loss": 0.4109, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 5.671321160042965e-06, |
|
"loss": 0.5054, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.60687432867884e-06, |
|
"loss": 0.3936, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.542427497314716e-06, |
|
"loss": 0.3992, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 5.477980665950591e-06, |
|
"loss": 0.6304, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.413533834586466e-06, |
|
"loss": 0.6699, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.349087003222342e-06, |
|
"loss": 0.8867, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 5.2846401718582175e-06, |
|
"loss": 0.6978, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.2201933404940925e-06, |
|
"loss": 0.7178, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.155746509129968e-06, |
|
"loss": 0.7168, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.091299677765843e-06, |
|
"loss": 0.4736, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 5.0268528464017184e-06, |
|
"loss": 0.5347, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.9624060150375935e-06, |
|
"loss": 0.6855, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.897959183673469e-06, |
|
"loss": 0.7231, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 4.833512352309345e-06, |
|
"loss": 0.3286, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.76906552094522e-06, |
|
"loss": 0.2817, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.704618689581096e-06, |
|
"loss": 0.5527, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.640171858216971e-06, |
|
"loss": 0.2981, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 4.575725026852846e-06, |
|
"loss": 0.4341, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.511278195488722e-06, |
|
"loss": 0.7925, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.446831364124597e-06, |
|
"loss": 0.1185, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 4.382384532760473e-06, |
|
"loss": 0.1108, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.317937701396348e-06, |
|
"loss": 1.0967, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.253490870032224e-06, |
|
"loss": 0.2571, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.189044038668099e-06, |
|
"loss": 0.7031, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.124597207303974e-06, |
|
"loss": 0.9097, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.06015037593985e-06, |
|
"loss": 0.3716, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.995703544575725e-06, |
|
"loss": 0.3604, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 3.931256713211601e-06, |
|
"loss": 0.5859, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.8668098818474765e-06, |
|
"loss": 0.1636, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.8023630504833515e-06, |
|
"loss": 0.2457, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.7379162191192266e-06, |
|
"loss": 0.5952, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.673469387755102e-06, |
|
"loss": 0.3359, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.6090225563909775e-06, |
|
"loss": 0.4526, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.544575725026853e-06, |
|
"loss": 0.5771, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.4801288936627283e-06, |
|
"loss": 0.2944, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.415682062298604e-06, |
|
"loss": 0.6084, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.3512352309344792e-06, |
|
"loss": 0.6567, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.2867883995703547e-06, |
|
"loss": 0.3926, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.2223415682062297e-06, |
|
"loss": 0.9121, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 0.1667, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.093447905477981e-06, |
|
"loss": 1.0684, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 3.029001074113856e-06, |
|
"loss": 0.9414, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.9645542427497315e-06, |
|
"loss": 0.3645, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.900107411385607e-06, |
|
"loss": 1.1328, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 2.8356605800214824e-06, |
|
"loss": 0.5664, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.771213748657358e-06, |
|
"loss": 0.311, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.706766917293233e-06, |
|
"loss": 0.3245, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.6423200859291087e-06, |
|
"loss": 0.5825, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 2.577873254564984e-06, |
|
"loss": 0.1556, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.5134264232008592e-06, |
|
"loss": 0.7402, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.4489795918367347e-06, |
|
"loss": 0.1271, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 2.38453276047261e-06, |
|
"loss": 0.7485, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.3200859291084856e-06, |
|
"loss": 0.3857, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.255639097744361e-06, |
|
"loss": 0.5498, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.1911922663802365e-06, |
|
"loss": 0.8418, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 2.126745435016112e-06, |
|
"loss": 0.5854, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 2.062298603651987e-06, |
|
"loss": 0.7441, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.9978517722878624e-06, |
|
"loss": 0.5293, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.9334049409237383e-06, |
|
"loss": 0.8838, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8689581095596133e-06, |
|
"loss": 0.5181, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.8045112781954887e-06, |
|
"loss": 0.8169, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.7400644468313642e-06, |
|
"loss": 0.6387, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.6756176154672396e-06, |
|
"loss": 1.4199, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.6111707841031149e-06, |
|
"loss": 0.7676, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.5467239527389905e-06, |
|
"loss": 0.4863, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 1.4822771213748658e-06, |
|
"loss": 0.5029, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.4178302900107412e-06, |
|
"loss": 0.5166, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.3533834586466164e-06, |
|
"loss": 0.4446, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.288936627282492e-06, |
|
"loss": 0.4229, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.2244897959183673e-06, |
|
"loss": 0.7046, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.1600429645542428e-06, |
|
"loss": 0.1925, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.0955961331901182e-06, |
|
"loss": 0.3721, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.0311493018259935e-06, |
|
"loss": 0.1495, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.667024704618691e-07, |
|
"loss": 0.4629, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.022556390977444e-07, |
|
"loss": 1.1387, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.378088077336198e-07, |
|
"loss": 1.4092, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.733619763694953e-07, |
|
"loss": 0.4365, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.089151450053706e-07, |
|
"loss": 1.0098, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.44468313641246e-07, |
|
"loss": 0.9849, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.800214822771214e-07, |
|
"loss": 0.4924, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 5.155746509129967e-07, |
|
"loss": 0.8003, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 4.511278195488722e-07, |
|
"loss": 0.7852, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.8668098818474763e-07, |
|
"loss": 0.0843, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 3.22234156820623e-07, |
|
"loss": 0.7656, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.5778732545649837e-07, |
|
"loss": 0.2798, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.9334049409237381e-07, |
|
"loss": 0.5186, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.2889366272824918e-07, |
|
"loss": 0.6855, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.444683136412459e-08, |
|
"loss": 0.3208, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.5391, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1035, |
|
"total_flos": 6.646386111817974e+18, |
|
"train_loss": 0.9942761997093901, |
|
"train_runtime": 437.6871, |
|
"train_samples_per_second": 606.762, |
|
"train_steps_per_second": 2.365 |
|
} |
|
], |
|
"max_steps": 1035, |
|
"num_train_epochs": 3, |
|
"total_flos": 6.646386111817974e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |