{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.28,
  "global_step": 1600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.7699115044247788e-07,
      "loss": 2.6342,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 3.5398230088495575e-07,
      "loss": 2.4942,
      "step": 2
    },
    {
      "epoch": 0.0,
      "learning_rate": 5.309734513274336e-07,
      "loss": 2.4458,
      "step": 3
    },
    {
      "epoch": 0.0,
      "learning_rate": 7.079646017699115e-07,
      "loss": 2.3624,
      "step": 4
    },
    {
      "epoch": 0.0,
      "learning_rate": 8.849557522123895e-07,
      "loss": 2.2731,
      "step": 5
    },
    {
      "epoch": 0.0,
      "learning_rate": 1.0619469026548673e-06,
      "loss": 2.3463,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.2389380530973452e-06,
      "loss": 2.4769,
      "step": 7
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.415929203539823e-06,
      "loss": 2.3047,
      "step": 8
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.592920353982301e-06,
      "loss": 2.4436,
      "step": 9
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.769911504424779e-06,
      "loss": 2.2426,
      "step": 10
    },
    {
      "epoch": 0.01,
      "learning_rate": 1.9469026548672567e-06,
      "loss": 2.3656,
      "step": 11
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.1238938053097345e-06,
      "loss": 2.3785,
      "step": 12
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.3008849557522127e-06,
      "loss": 2.3527,
      "step": 13
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.4778761061946905e-06,
      "loss": 2.2126,
      "step": 14
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.6548672566371687e-06,
      "loss": 2.3341,
      "step": 15
    },
    {
      "epoch": 0.01,
      "learning_rate": 2.831858407079646e-06,
      "loss": 2.2448,
      "step": 16
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.0088495575221242e-06,
      "loss": 2.1304,
      "step": 17
    },
    {
      "epoch": 0.01,
      "learning_rate": 3.185840707964602e-06,
      "loss": 2.2165,
      "step": 18
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.36283185840708e-06,
      "loss": 2.3042,
      "step": 19
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.539823008849558e-06,
      "loss": 2.2978,
      "step": 20
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.7168141592920357e-06,
      "loss": 2.2188,
      "step": 21
    },
    {
      "epoch": 0.02,
      "learning_rate": 3.8938053097345135e-06,
      "loss": 2.1372,
      "step": 22
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.070796460176992e-06,
      "loss": 2.2388,
      "step": 23
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.247787610619469e-06,
      "loss": 2.1211,
      "step": 24
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.424778761061948e-06,
      "loss": 2.2362,
      "step": 25
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.6017699115044254e-06,
      "loss": 2.2629,
      "step": 26
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.778761061946903e-06,
      "loss": 2.3319,
      "step": 27
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.955752212389381e-06,
      "loss": 2.1444,
      "step": 28
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.132743362831859e-06,
      "loss": 2.0985,
      "step": 29
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.309734513274337e-06,
      "loss": 2.2057,
      "step": 30
    },
    {
      "epoch": 0.02,
      "learning_rate": 5.486725663716814e-06,
      "loss": 2.2332,
      "step": 31
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.663716814159292e-06,
      "loss": 2.2764,
      "step": 32
    },
    {
      "epoch": 0.03,
      "learning_rate": 5.840707964601771e-06,
      "loss": 2.2456,
      "step": 33
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.0176991150442484e-06,
      "loss": 2.0918,
      "step": 34
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.194690265486726e-06,
      "loss": 2.0425,
      "step": 35
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.371681415929204e-06,
      "loss": 2.2086,
      "step": 36
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.548672566371682e-06,
      "loss": 2.1651,
      "step": 37
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.72566371681416e-06,
      "loss": 2.1239,
      "step": 38
    },
    {
      "epoch": 0.03,
      "learning_rate": 6.902654867256637e-06,
      "loss": 2.0779,
      "step": 39
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.079646017699116e-06,
      "loss": 2.1271,
      "step": 40
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.256637168141594e-06,
      "loss": 2.2926,
      "step": 41
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.4336283185840714e-06,
      "loss": 2.1001,
      "step": 42
    },
    {
      "epoch": 0.03,
      "learning_rate": 7.610619469026549e-06,
      "loss": 2.2959,
      "step": 43
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.787610619469027e-06,
      "loss": 2.0291,
      "step": 44
    },
    {
      "epoch": 0.04,
      "learning_rate": 7.964601769911505e-06,
      "loss": 2.1234,
      "step": 45
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.141592920353984e-06,
      "loss": 2.1356,
      "step": 46
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.31858407079646e-06,
      "loss": 2.1785,
      "step": 47
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.495575221238938e-06,
      "loss": 2.151,
      "step": 48
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.672566371681418e-06,
      "loss": 2.0698,
      "step": 49
    },
    {
      "epoch": 0.04,
      "learning_rate": 8.849557522123895e-06,
      "loss": 2.2233,
      "step": 50
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.026548672566371e-06,
      "loss": 2.2075,
      "step": 51
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.203539823008851e-06,
      "loss": 2.1574,
      "step": 52
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.380530973451329e-06,
      "loss": 2.1002,
      "step": 53
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.557522123893806e-06,
      "loss": 2.1708,
      "step": 54
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.734513274336284e-06,
      "loss": 2.0356,
      "step": 55
    },
    {
      "epoch": 0.04,
      "learning_rate": 9.911504424778762e-06,
      "loss": 2.196,
      "step": 56
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.008849557522124e-05,
      "loss": 2.1957,
      "step": 57
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0265486725663717e-05,
      "loss": 2.113,
      "step": 58
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0442477876106197e-05,
      "loss": 2.1256,
      "step": 59
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0619469026548675e-05,
      "loss": 2.1362,
      "step": 60
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.079646017699115e-05,
      "loss": 2.0819,
      "step": 61
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.0973451327433629e-05,
      "loss": 2.0522,
      "step": 62
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1150442477876106e-05,
      "loss": 2.2226,
      "step": 63
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1327433628318584e-05,
      "loss": 2.1568,
      "step": 64
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1504424778761064e-05,
      "loss": 1.9969,
      "step": 65
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1681415929203541e-05,
      "loss": 2.0662,
      "step": 66
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.1858407079646019e-05,
      "loss": 2.1751,
      "step": 67
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.2035398230088497e-05,
      "loss": 2.2176,
      "step": 68
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2212389380530973e-05,
      "loss": 2.0555,
      "step": 69
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2389380530973452e-05,
      "loss": 2.1516,
      "step": 70
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.256637168141593e-05,
      "loss": 2.258,
      "step": 71
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2743362831858408e-05,
      "loss": 2.0608,
      "step": 72
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.2920353982300886e-05,
      "loss": 2.1596,
      "step": 73
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3097345132743363e-05,
      "loss": 2.1454,
      "step": 74
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3274336283185843e-05,
      "loss": 2.0923,
      "step": 75
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.345132743362832e-05,
      "loss": 2.0999,
      "step": 76
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3628318584070797e-05,
      "loss": 2.0944,
      "step": 77
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3805309734513275e-05,
      "loss": 2.1493,
      "step": 78
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.3982300884955752e-05,
      "loss": 2.1631,
      "step": 79
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.4159292035398232e-05,
      "loss": 2.1735,
      "step": 80
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.433628318584071e-05,
      "loss": 2.2144,
      "step": 81
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4513274336283187e-05,
      "loss": 2.1732,
      "step": 82
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4690265486725665e-05,
      "loss": 2.1125,
      "step": 83
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.4867256637168143e-05,
      "loss": 2.1681,
      "step": 84
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5044247787610619e-05,
      "loss": 2.0783,
      "step": 85
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5221238938053098e-05,
      "loss": 2.0296,
      "step": 86
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5398230088495576e-05,
      "loss": 2.1401,
      "step": 87
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5575221238938054e-05,
      "loss": 2.1291,
      "step": 88
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.5752212389380532e-05,
      "loss": 2.1044,
      "step": 89
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.592920353982301e-05,
      "loss": 2.0911,
      "step": 90
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6106194690265487e-05,
      "loss": 2.1175,
      "step": 91
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.628318584070797e-05,
      "loss": 2.1085,
      "step": 92
    },
    {
      "epoch": 0.07,
      "learning_rate": 1.6460176991150443e-05,
      "loss": 2.1192,
      "step": 93
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.663716814159292e-05,
      "loss": 2.1747,
      "step": 94
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.68141592920354e-05,
      "loss": 2.1093,
      "step": 95
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.6991150442477876e-05,
      "loss": 2.3442,
      "step": 96
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7168141592920354e-05,
      "loss": 2.0882,
      "step": 97
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7345132743362835e-05,
      "loss": 2.2757,
      "step": 98
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7522123893805313e-05,
      "loss": 2.1743,
      "step": 99
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.769911504424779e-05,
      "loss": 2.1613,
      "step": 100
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.7876106194690265e-05,
      "loss": 2.0899,
      "step": 101
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8053097345132743e-05,
      "loss": 2.1127,
      "step": 102
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.823008849557522e-05,
      "loss": 2.0457,
      "step": 103
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8407079646017702e-05,
      "loss": 2.0061,
      "step": 104
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.858407079646018e-05,
      "loss": 2.0532,
      "step": 105
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.8761061946902657e-05,
      "loss": 2.2078,
      "step": 106
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.8938053097345135e-05,
      "loss": 2.1356,
      "step": 107
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9115044247787613e-05,
      "loss": 2.1502,
      "step": 108
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.929203539823009e-05,
      "loss": 2.2263,
      "step": 109
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.946902654867257e-05,
      "loss": 2.1397,
      "step": 110
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9646017699115046e-05,
      "loss": 2.0656,
      "step": 111
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9823008849557524e-05,
      "loss": 2.0862,
      "step": 112
    },
    {
      "epoch": 0.09,
      "learning_rate": 2e-05,
      "loss": 2.1775,
      "step": 113
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.999999626936191e-05,
      "loss": 2.0169,
      "step": 114
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9999985077450406e-05,
      "loss": 2.1069,
      "step": 115
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9999966424273852e-05,
      "loss": 2.1435,
      "step": 116
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.999994030984616e-05,
      "loss": 2.2267,
      "step": 117
    },
    {
      "epoch": 0.09,
      "learning_rate": 1.9999906734186815e-05,
      "loss": 1.997,
      "step": 118
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999986569732087e-05,
      "loss": 2.2769,
      "step": 119
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999817199278942e-05,
      "loss": 1.9394,
      "step": 120
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999761240097216e-05,
      "loss": 2.121,
      "step": 121
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999969781981745e-05,
      "loss": 2.2084,
      "step": 122
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999626938486954e-05,
      "loss": 2.0314,
      "step": 123
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999548596158626e-05,
      "loss": 2.2046,
      "step": 124
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999462792890913e-05,
      "loss": 2.1457,
      "step": 125
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999369528747838e-05,
      "loss": 2.1497,
      "step": 126
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999926880379898e-05,
      "loss": 2.0739,
      "step": 127
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9999160618119502e-05,
      "loss": 2.2034,
      "step": 128
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.999904497179012e-05,
      "loss": 2.1544,
      "step": 129
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9998921864897123e-05,
      "loss": 2.2104,
      "step": 130
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.9998791297532362e-05,
      "loss": 2.052,
      "step": 131
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999865326979326e-05,
      "loss": 2.2697,
      "step": 132
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998507781782802e-05,
      "loss": 2.0682,
      "step": 133
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998354833609537e-05,
      "loss": 2.0808,
      "step": 134
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9998194425387588e-05,
      "loss": 2.0473,
      "step": 135
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999802655723664e-05,
      "loss": 2.1415,
      "step": 136
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997851229281942e-05,
      "loss": 2.0943,
      "step": 137
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999766844165431e-05,
      "loss": 2.162,
      "step": 138
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997478194490135e-05,
      "loss": 2.1148,
      "step": 139
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997280487931355e-05,
      "loss": 2.3097,
      "step": 140
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9997075322125492e-05,
      "loss": 2.0729,
      "step": 141
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9996862697225624e-05,
      "loss": 2.2898,
      "step": 142
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.999664261339039e-05,
      "loss": 2.1436,
      "step": 143
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9996415070784007e-05,
      "loss": 2.0268,
      "step": 144
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999618006957625e-05,
      "loss": 2.1917,
      "step": 145
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995937609942463e-05,
      "loss": 2.0689,
      "step": 146
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995687692063547e-05,
      "loss": 2.0695,
      "step": 147
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999543031612597e-05,
      "loss": 2.0363,
      "step": 148
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9995165482321775e-05,
      "loss": 2.0635,
      "step": 149
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9994893190848556e-05,
      "loss": 2.2353,
      "step": 150
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999461344190948e-05,
      "loss": 2.1787,
      "step": 151
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9994326235713278e-05,
      "loss": 2.136,
      "step": 152
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9994031572474238e-05,
      "loss": 2.0413,
      "step": 153
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9993729452412214e-05,
      "loss": 2.1219,
      "step": 154
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.9993419875752632e-05,
      "loss": 2.1356,
      "step": 155
    },
    {
      "epoch": 0.12,
      "learning_rate": 1.999310284272647e-05,
      "loss": 2.0748,
      "step": 156
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9992778353570284e-05,
      "loss": 2.0701,
      "step": 157
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9992446408526177e-05,
      "loss": 2.238,
      "step": 158
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999210700784182e-05,
      "loss": 2.194,
      "step": 159
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.999176015177046e-05,
      "loss": 2.2741,
      "step": 160
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9991405840570886e-05,
      "loss": 2.1712,
      "step": 161
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9991044074507463e-05,
      "loss": 2.0362,
      "step": 162
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9990674853850113e-05,
      "loss": 2.0989,
      "step": 163
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9990298178874322e-05,
      "loss": 2.1951,
      "step": 164
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9989914049861145e-05,
      "loss": 2.1943,
      "step": 165
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.998952246709718e-05,
      "loss": 2.1334,
      "step": 166
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9989123430874602e-05,
      "loss": 2.1329,
      "step": 167
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.9988716941491142e-05,
      "loss": 2.1719,
      "step": 168
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.99883029992501e-05,
      "loss": 2.0233,
      "step": 169
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998788160446032e-05,
      "loss": 2.2163,
      "step": 170
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998745275743622e-05,
      "loss": 2.2141,
      "step": 171
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.998701645849778e-05,
      "loss": 2.2667,
      "step": 172
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9986572707970527e-05,
      "loss": 2.052,
      "step": 173
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9986121506185557e-05,
      "loss": 2.0601,
      "step": 174
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9985662853479525e-05,
      "loss": 2.089,
      "step": 175
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9985196750194647e-05,
      "loss": 2.2365,
      "step": 176
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9984723196678694e-05,
      "loss": 2.167,
      "step": 177
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9984242193284996e-05,
      "loss": 2.0078,
      "step": 178
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9983753740372442e-05,
      "loss": 2.0686,
      "step": 179
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9983257838305487e-05,
      "loss": 2.1354,
      "step": 180
    },
    {
      "epoch": 0.14,
      "learning_rate": 1.9982754487454124e-05,
      "loss": 1.9609,
      "step": 181
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9982243688193935e-05,
      "loss": 2.1828,
      "step": 182
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9981725440906023e-05,
      "loss": 2.195,
      "step": 183
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998119974597708e-05,
      "loss": 2.1612,
      "step": 184
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9980666603799336e-05,
      "loss": 2.115,
      "step": 185
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.998012601477058e-05,
      "loss": 2.0758,
      "step": 186
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997957797929417e-05,
      "loss": 2.1763,
      "step": 187
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9979022497779003e-05,
      "loss": 2.1175,
      "step": 188
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997845957063954e-05,
      "loss": 2.104,
      "step": 189
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9977889198295794e-05,
      "loss": 2.1183,
      "step": 190
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997731138117334e-05,
      "loss": 2.2253,
      "step": 191
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997672611970331e-05,
      "loss": 2.0031,
      "step": 192
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.997613341432237e-05,
      "loss": 2.1712,
      "step": 193
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9975533265472756e-05,
      "loss": 2.187,
      "step": 194
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9974925673602263e-05,
      "loss": 2.1687,
      "step": 195
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.997431063916423e-05,
      "loss": 2.0695,
      "step": 196
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9973688162617545e-05,
      "loss": 2.1737,
      "step": 197
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9973058244426663e-05,
      "loss": 2.0203,
      "step": 198
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9972420885061576e-05,
      "loss": 2.2102,
      "step": 199
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9971776084997844e-05,
      "loss": 2.156,
      "step": 200
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9971123844716562e-05,
      "loss": 2.0291,
      "step": 201
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9970464164704387e-05,
      "loss": 2.0157,
      "step": 202
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.996979704545353e-05,
      "loss": 2.1021,
      "step": 203
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.996912248746174e-05,
      "loss": 2.1,
      "step": 204
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9968440491232326e-05,
      "loss": 2.078,
      "step": 205
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9967751057274147e-05,
      "loss": 2.0041,
      "step": 206
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.99670541861016e-05,
      "loss": 2.1863,
      "step": 207
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996634987823465e-05,
      "loss": 2.1564,
      "step": 208
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9965638134198792e-05,
      "loss": 2.0383,
      "step": 209
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9964918954525086e-05,
      "loss": 2.073,
      "step": 210
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996419233975013e-05,
      "loss": 2.2136,
      "step": 211
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9963458290416066e-05,
      "loss": 1.9862,
      "step": 212
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9962716807070592e-05,
      "loss": 2.0693,
      "step": 213
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9961967890266948e-05,
      "loss": 2.0951,
      "step": 214
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.996121154056392e-05,
      "loss": 2.1478,
      "step": 215
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9960447758525846e-05,
      "loss": 2.1191,
      "step": 216
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9959676544722605e-05,
      "loss": 2.1407,
      "step": 217
    },
    {
      "epoch": 0.17,
      "learning_rate": 1.9958897899729616e-05,
      "loss": 2.2314,
      "step": 218
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995811182412785e-05,
      "loss": 2.1224,
      "step": 219
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995731831850382e-05,
      "loss": 2.2138,
      "step": 220
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9956517383449587e-05,
      "loss": 2.0954,
      "step": 221
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995570901956274e-05,
      "loss": 2.0866,
      "step": 222
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995489322744643e-05,
      "loss": 1.984,
      "step": 223
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995407000770934e-05,
      "loss": 2.1659,
      "step": 224
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9953239360965697e-05,
      "loss": 2.134,
      "step": 225
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.995240128783527e-05,
      "loss": 2.0198,
      "step": 226
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9951555788943364e-05,
      "loss": 2.1189,
      "step": 227
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9950702864920837e-05,
      "loss": 2.0313,
      "step": 228
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9949842516404073e-05,
      "loss": 2.0241,
      "step": 229
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9948974744035002e-05,
      "loss": 2.003,
      "step": 230
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9948099548461098e-05,
      "loss": 2.1218,
      "step": 231
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9947216930335363e-05,
      "loss": 2.051,
      "step": 232
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9946326890316345e-05,
      "loss": 2.09,
      "step": 233
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9945429429068127e-05,
      "loss": 2.1685,
      "step": 234
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9944524547260334e-05,
      "loss": 2.1266,
      "step": 235
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9943612245568115e-05,
      "loss": 2.1289,
      "step": 236
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.994269252467217e-05,
      "loss": 2.097,
      "step": 237
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9941765385258723e-05,
      "loss": 2.0301,
      "step": 238
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9940830828019547e-05,
      "loss": 2.1081,
      "step": 239
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9939888853651933e-05,
      "loss": 2.1121,
      "step": 240
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9938939462858714e-05,
      "loss": 2.1643,
      "step": 241
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9937982656348262e-05,
      "loss": 2.0362,
      "step": 242
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.9937018434834472e-05,
      "loss": 2.0981,
      "step": 243
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.993604679903678e-05,
      "loss": 2.0792,
      "step": 244
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9935067749680145e-05,
      "loss": 2.057,
      "step": 245
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.993408128749507e-05,
      "loss": 2.0001,
      "step": 246
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9933087413217575e-05,
      "loss": 2.133,
      "step": 247
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.993208612758922e-05,
      "loss": 2.0653,
      "step": 248
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9931077431357095e-05,
      "loss": 2.2018,
      "step": 249
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9930061325273812e-05,
      "loss": 2.0735,
      "step": 250
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9929037810097516e-05,
      "loss": 2.1075,
      "step": 251
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992800688659188e-05,
      "loss": 2.0776,
      "step": 252
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9926968555526108e-05,
      "loss": 2.1681,
      "step": 253
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.9925922817674923e-05,
      "loss": 2.0747,
      "step": 254
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992486967381858e-05,
      "loss": 2.1578,
      "step": 255
    },
    {
      "epoch": 0.2,
      "learning_rate": 1.992380912474286e-05,
      "loss": 2.1087,
      "step": 256
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9922741171239064e-05,
      "loss": 2.0957,
      "step": 257
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9921665814104027e-05,
      "loss": 2.1029,
      "step": 258
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9920583054140104e-05,
      "loss": 2.1479,
      "step": 259
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9919492892155164e-05,
      "loss": 2.1984,
      "step": 260
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9918395328962615e-05,
      "loss": 2.2156,
      "step": 261
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9917290365381373e-05,
      "loss": 2.1825,
      "step": 262
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9916178002235886e-05,
      "loss": 2.1642,
      "step": 263
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.991505824035612e-05,
      "loss": 2.0837,
      "step": 264
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9913931080577554e-05,
      "loss": 2.0187,
      "step": 265
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.99127965237412e-05,
      "loss": 2.1871,
      "step": 266
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9911654570693576e-05,
      "loss": 2.0616,
      "step": 267
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.991050522228673e-05,
      "loss": 2.1396,
      "step": 268
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9909348479378216e-05,
      "loss": 2.0462,
      "step": 269
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990818434283112e-05,
      "loss": 2.0684,
      "step": 270
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990701281351403e-05,
      "loss": 2.0668,
      "step": 271
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9905833892301067e-05,
      "loss": 2.1311,
      "step": 272
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.990464758007184e-05,
      "loss": 2.2016,
      "step": 273
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.99034538777115e-05,
      "loss": 2.2369,
      "step": 274
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9902252786110702e-05,
      "loss": 2.0509,
      "step": 275
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9901044306165607e-05,
      "loss": 2.1006,
      "step": 276
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.98998284387779e-05,
      "loss": 2.1585,
      "step": 277
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9898605184854773e-05,
      "loss": 2.2726,
      "step": 278
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9897374545308928e-05,
      "loss": 2.2056,
      "step": 279
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.989613652105858e-05,
      "loss": 2.0896,
      "step": 280
    },
    {
      "epoch": 0.22,
      "learning_rate": 1.9894891113027457e-05,
      "loss": 2.0929,
      "step": 281
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.989363832214479e-05,
      "loss": 2.0166,
      "step": 282
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.989237814934531e-05,
      "loss": 2.1788,
      "step": 283
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9891110595569283e-05,
      "loss": 1.9928,
      "step": 284
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9889835661762457e-05,
      "loss": 2.1352,
      "step": 285
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.98885533488761e-05,
      "loss": 2.1533,
      "step": 286
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9887263657866974e-05,
      "loss": 2.0889,
      "step": 287
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.988596658969736e-05,
      "loss": 2.1308,
      "step": 288
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9884662145335033e-05,
      "loss": 2.1413,
      "step": 289
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9883350325753276e-05,
      "loss": 2.1864,
      "step": 290
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9882031131930876e-05,
      "loss": 2.042,
      "step": 291
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9880704564852112e-05,
      "loss": 2.2396,
      "step": 292
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.9879370625506783e-05,
      "loss": 2.1106,
      "step": 293
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.987802931489017e-05,
      "loss": 2.1762,
      "step": 294
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9876680634003068e-05,
      "loss": 2.0019,
      "step": 295
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.987532458385176e-05,
      "loss": 2.1555,
      "step": 296
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.987396116544803e-05,
      "loss": 2.0512,
      "step": 297
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9872590379809173e-05,
      "loss": 2.1658,
      "step": 298
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9871212227957962e-05,
      "loss": 2.1243,
      "step": 299
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9869826710922676e-05,
      "loss": 2.1094,
      "step": 300
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9868433829737085e-05,
      "loss": 2.0905,
      "step": 301
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9867033585440457e-05,
      "loss": 2.0737,
      "step": 302
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9865625979077557e-05,
      "loss": 2.1049,
      "step": 303
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9864211011698635e-05,
      "loss": 2.1059,
      "step": 304
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.986278868435944e-05,
      "loss": 2.0882,
      "step": 305
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9861358998121207e-05,
      "loss": 1.9053,
      "step": 306
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9859921954050664e-05,
      "loss": 2.0445,
      "step": 307
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9858477553220034e-05,
      "loss": 2.1344,
      "step": 308
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9857025796707018e-05,
      "loss": 2.0787,
      "step": 309
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9855566685594817e-05,
      "loss": 2.1858,
      "step": 310
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9854100220972112e-05,
      "loss": 2.0423,
      "step": 311
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.985262640393307e-05,
      "loss": 2.1353,
      "step": 312
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9851145235577352e-05,
      "loss": 2.157,
      "step": 313
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9849656717010094e-05,
      "loss": 2.0816,
      "step": 314
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9848160849341927e-05,
      "loss": 2.032,
      "step": 315
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.984665763368895e-05,
      "loss": 1.9813,
      "step": 316
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.984514707117276e-05,
      "loss": 2.1839,
      "step": 317
    },
    {
      "epoch": 0.25,
      "learning_rate": 1.9843629162920428e-05,
      "loss": 2.0142,
      "step": 318
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9842103910064507e-05,
      "loss": 2.1252,
      "step": 319
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.984057131374303e-05,
      "loss": 2.1124,
      "step": 320
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9839031375099514e-05,
      "loss": 2.1058,
      "step": 321
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.983748409528294e-05,
      "loss": 2.1472,
      "step": 322
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9835929475447786e-05,
      "loss": 2.2478,
      "step": 323
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9834367516753992e-05,
      "loss": 2.0825,
      "step": 324
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9832798220366977e-05,
      "loss": 2.1809,
      "step": 325
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9831221587457642e-05,
      "loss": 2.081,
      "step": 326
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982963761920235e-05,
      "loss": 2.1381,
      "step": 327
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982804631678295e-05,
      "loss": 2.0337,
      "step": 328
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982644768138675e-05,
      "loss": 1.9488,
      "step": 329
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.982484171420654e-05,
      "loss": 1.9757,
      "step": 330
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.9823228416440577e-05,
      "loss": 1.9545,
      "step": 331
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9821607789292584e-05,
      "loss": 2.1547,
      "step": 332
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9819979833971756e-05,
      "loss": 2.0237,
      "step": 333
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.981834455169276e-05,
      "loss": 2.1029,
      "step": 334
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.981670194367572e-05,
      "loss": 2.0786,
      "step": 335
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9815052011146233e-05,
      "loss": 2.1815,
      "step": 336
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.981339475533536e-05,
      "loss": 2.1501,
      "step": 337
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9811730177479624e-05,
      "loss": 2.0851,
      "step": 338
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9810058278821017e-05,
      "loss": 2.0512,
      "step": 339
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.980837906060698e-05,
      "loss": 2.1895,
      "step": 340
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9806692524090434e-05,
      "loss": 2.0738,
      "step": 341
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.9804998670529742e-05,
      "loss": 2.0929,
      "step": 342
    },
    {
      "epoch": 0.27,
      "learning_rate": 1.980329750118874e-05,
      "loss": 2.1284,
      "step": 343
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9801589017336715e-05,
      "loss": 2.0856,
      "step": 344
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9799873220248417e-05,
      "loss": 2.1048,
      "step": 345
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9798150111204045e-05,
      "loss": 1.9984,
      "step": 346
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9796419691489263e-05,
      "loss": 2.2915,
      "step": 347
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9794681962395182e-05,
      "loss": 2.2062,
      "step": 348
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.979293692521837e-05,
      "loss": 2.2924,
      "step": 349
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.979118458126085e-05,
      "loss": 2.1264,
      "step": 350
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.978942493183009e-05,
      "loss": 2.0657,
      "step": 351
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9787657978239014e-05,
      "loss": 2.1687,
      "step": 352
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9785883721805997e-05,
      "loss": 2.1672,
      "step": 353
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9784102163854862e-05,
      "loss": 2.0972,
      "step": 354
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9782313305714873e-05,
      "loss": 2.217,
      "step": 355
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9780517148720752e-05,
      "loss": 2.1013,
      "step": 356
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9778713694212662e-05,
      "loss": 2.0173,
      "step": 357
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9776902943536203e-05,
      "loss": 2.094,
      "step": 358
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.977508489804243e-05,
      "loss": 2.2898,
      "step": 359
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9773259559087838e-05,
      "loss": 2.1117,
      "step": 360
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9771426928034362e-05,
      "loss": 2.2154,
      "step": 361
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976958700624938e-05,
      "loss": 2.1857,
      "step": 362
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9767739795105708e-05,
      "loss": 2.089,
      "step": 363
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.97658852959816e-05,
      "loss": 2.0843,
      "step": 364
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976402351026075e-05,
      "loss": 2.1967,
      "step": 365
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976215443933229e-05,
      "loss": 2.0014,
      "step": 366
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.976027808459078e-05,
      "loss": 2.0905,
      "step": 367
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.975839444743622e-05,
      "loss": 2.1205,
      "step": 368
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9756503529274047e-05,
      "loss": 2.283,
      "step": 369
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.975460533151513e-05,
      "loss": 2.1516,
      "step": 370
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.975269985557576e-05,
      "loss": 2.0907,
      "step": 371
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9750787102877673e-05,
      "loss": 2.0709,
      "step": 372
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9748867074848022e-05,
      "loss": 2.0006,
      "step": 373
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9746939772919393e-05,
      "loss": 2.1337,
      "step": 374
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.97450051985298e-05,
      "loss": 1.9718,
      "step": 375
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.974306335312268e-05,
      "loss": 2.0384,
      "step": 376
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.97411142381469e-05,
      "loss": 2.2065,
      "step": 377
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9739157855056746e-05,
      "loss": 2.052,
      "step": 378
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9737194205311935e-05,
      "loss": 2.0993,
      "step": 379
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9735223290377595e-05,
      "loss": 2.1628,
      "step": 380
    },
    {
      "epoch": 0.3,
      "learning_rate": 1.9733245111724282e-05,
      "loss": 2.2113,
      "step": 381
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9731259670827973e-05,
      "loss": 2.071,
      "step": 382
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9729266969170048e-05,
      "loss": 2.0015,
      "step": 383
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9727267008237334e-05,
      "loss": 2.1402,
      "step": 384
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9725259789522043e-05,
      "loss": 2.2953,
      "step": 385
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9723245314521827e-05,
      "loss": 2.0347,
      "step": 386
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9721223584739735e-05,
      "loss": 2.1246,
      "step": 387
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9719194601684236e-05,
      "loss": 2.2483,
      "step": 388
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.971715836686921e-05,
      "loss": 2.0186,
      "step": 389
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.971511488181395e-05,
      "loss": 2.0507,
      "step": 390
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9713064148043156e-05,
      "loss": 2.0041,
      "step": 391
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.971100616708694e-05,
      "loss": 2.2381,
      "step": 392
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9708940940480813e-05,
      "loss": 1.9932,
      "step": 393
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9706868469765697e-05,
      "loss": 2.0435,
      "step": 394
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9704788756487926e-05,
      "loss": 2.1168,
      "step": 395
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.970270180219923e-05,
      "loss": 2.2486,
      "step": 396
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9700607608456734e-05,
      "loss": 2.074,
      "step": 397
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.969850617682299e-05,
      "loss": 2.003,
      "step": 398
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9696397508865917e-05,
      "loss": 2.2014,
      "step": 399
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9694281606158864e-05,
      "loss": 2.0637,
      "step": 400
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.969215847028056e-05,
      "loss": 2.2759,
      "step": 401
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9690028102815132e-05,
      "loss": 2.0274,
      "step": 402
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.968789050535211e-05,
      "loss": 1.9691,
      "step": 403
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9685745679486408e-05,
      "loss": 2.2409,
      "step": 404
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.968359362681835e-05,
      "loss": 2.1545,
      "step": 405
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9681434348953638e-05,
      "loss": 2.1214,
      "step": 406
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9679267847503362e-05,
      "loss": 2.0591,
      "step": 407
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.967709412408402e-05,
      "loss": 2.102,
      "step": 408
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9674913180317478e-05,
      "loss": 2.123,
      "step": 409
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9672725017831003e-05,
      "loss": 2.1348,
      "step": 410
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9670529638257242e-05,
      "loss": 2.1212,
      "step": 411
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9668327043234225e-05,
      "loss": 2.0202,
      "step": 412
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9666117234405378e-05,
      "loss": 2.1056,
      "step": 413
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9663900213419492e-05,
      "loss": 2.1624,
      "step": 414
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.9661675981930747e-05,
      "loss": 2.1116,
      "step": 415
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965944454159871e-05,
      "loss": 2.0525,
      "step": 416
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965720589408832e-05,
      "loss": 2.0346,
      "step": 417
    },
    {
      "epoch": 0.33,
      "learning_rate": 1.965496004106989e-05,
      "loss": 2.0764,
      "step": 418
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9652706984219114e-05,
      "loss": 2.0636,
      "step": 419
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9650446725217056e-05,
      "loss": 2.058,
      "step": 420
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9648179265750165e-05,
      "loss": 2.1546,
      "step": 421
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.964590460751025e-05,
      "loss": 1.9647,
      "step": 422
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9643622752194496e-05,
      "loss": 1.9524,
      "step": 423
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9641333701505465e-05,
      "loss": 2.0231,
      "step": 424
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9639037457151072e-05,
      "loss": 2.1501,
      "step": 425
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9636734020844614e-05,
      "loss": 2.0024,
      "step": 426
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.963442339430475e-05,
      "loss": 2.0111,
      "step": 427
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9632105579255497e-05,
      "loss": 1.993,
      "step": 428
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.962978057742624e-05,
      "loss": 2.0784,
      "step": 429
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9627448390551736e-05,
      "loss": 2.1103,
      "step": 430
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.9625109020372085e-05,
      "loss": 2.0631,
      "step": 431
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.962276246863276e-05,
      "loss": 2.0654,
      "step": 432
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9620408737084586e-05,
      "loss": 2.1347,
      "step": 433
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9618047827483745e-05,
      "loss": 2.1274,
      "step": 434
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9615679741591784e-05,
      "loss": 1.9769,
      "step": 435
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9613304481175594e-05,
      "loss": 1.9048,
      "step": 436
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9610922048007418e-05,
      "loss": 2.0066,
      "step": 437
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.960853244386486e-05,
      "loss": 2.1428,
      "step": 438
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9606135670530872e-05,
      "loss": 2.0388,
      "step": 439
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9603731729793746e-05,
      "loss": 1.9821,
      "step": 440
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.960132062344713e-05,
      "loss": 2.1089,
      "step": 441
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9598902353290024e-05,
      "loss": 2.061,
      "step": 442
    },
    {
      "epoch": 0.35,
      "learning_rate": 1.9596476921126757e-05,
      "loss": 2.0722,
      "step": 443
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9594044328767015e-05,
      "loss": 2.1043,
      "step": 444
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9591604578025825e-05,
      "loss": 2.2685,
      "step": 445
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.958915767072355e-05,
      "loss": 2.1525,
      "step": 446
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.958670360868589e-05,
      "loss": 2.2932,
      "step": 447
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.95842423937439e-05,
      "loss": 2.2267,
      "step": 448
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9581774027733947e-05,
      "loss": 1.9721,
      "step": 449
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9579298512497758e-05,
      "loss": 1.9634,
      "step": 450
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.957681584988238e-05,
      "loss": 2.1615,
      "step": 451
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.957432604174019e-05,
      "loss": 2.1338,
      "step": 452
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9571829089928913e-05,
      "loss": 2.1771,
      "step": 453
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9569324996311586e-05,
      "loss": 2.0738,
      "step": 454
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.9566813762756584e-05,
      "loss": 2.0316,
      "step": 455
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.956429539113761e-05,
      "loss": 2.1739,
      "step": 456
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.956176988333369e-05,
      "loss": 2.1869,
      "step": 457
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9559237241229175e-05,
      "loss": 2.1589,
      "step": 458
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9556697466713738e-05,
      "loss": 2.2783,
      "step": 459
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9554150561682374e-05,
      "loss": 2.1501,
      "step": 460
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.95515965280354e-05,
      "loss": 1.9589,
      "step": 461
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9549035367678453e-05,
      "loss": 2.0964,
      "step": 462
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9546467082522486e-05,
      "loss": 2.2925,
      "step": 463
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9543891674483767e-05,
      "loss": 2.0813,
      "step": 464
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9541309145483872e-05,
      "loss": 2.1646,
      "step": 465
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.953871949744971e-05,
      "loss": 2.1302,
      "step": 466
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9536122732313476e-05,
      "loss": 2.0852,
      "step": 467
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9533518852012692e-05,
      "loss": 2.1051,
      "step": 468
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9530907858490195e-05,
      "loss": 2.0434,
      "step": 469
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9528289753694108e-05,
      "loss": 2.044,
      "step": 470
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9525664539577877e-05,
      "loss": 2.0711,
      "step": 471
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.952303221810024e-05,
      "loss": 2.2477,
      "step": 472
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9520392791225255e-05,
      "loss": 2.167,
      "step": 473
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.951774626092226e-05,
      "loss": 2.2673,
      "step": 474
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.951509262916591e-05,
      "loss": 2.024,
      "step": 475
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9512431897936156e-05,
      "loss": 2.1577,
      "step": 476
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.950976406921824e-05,
      "loss": 1.9125,
      "step": 477
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.95070891450027e-05,
      "loss": 2.0845,
      "step": 478
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9504407127285377e-05,
      "loss": 1.9416,
      "step": 479
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9501718018067395e-05,
      "loss": 1.9172,
      "step": 480
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.9499021819355168e-05,
      "loss": 2.0909,
      "step": 481
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.949631853316041e-05,
      "loss": 2.2726,
      "step": 482
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.949360816150012e-05,
      "loss": 2.0058,
      "step": 483
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9490890706396577e-05,
      "loss": 2.0582,
      "step": 484
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.948816616987735e-05,
      "loss": 2.1043,
      "step": 485
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.948543455397529e-05,
      "loss": 1.9863,
      "step": 486
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9482695860728534e-05,
      "loss": 2.1207,
      "step": 487
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.947995009218049e-05,
      "loss": 2.2377,
      "step": 488
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9477197250379862e-05,
      "loss": 2.0327,
      "step": 489
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9474437337380608e-05,
      "loss": 2.1055,
      "step": 490
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.947167035524199e-05,
      "loss": 1.9996,
      "step": 491
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.946889630602852e-05,
      "loss": 2.0112,
      "step": 492
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9466115191809995e-05,
      "loss": 2.026,
      "step": 493
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9463327014661484e-05,
      "loss": 2.1515,
      "step": 494
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.946053177666332e-05,
      "loss": 2.1027,
      "step": 495
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9457729479901103e-05,
      "loss": 2.0827,
      "step": 496
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9454920126465716e-05,
      "loss": 2.0898,
      "step": 497
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9452103718453283e-05,
      "loss": 2.0573,
      "step": 498
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944928025796521e-05,
      "loss": 2.2742,
      "step": 499
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944644974710816e-05,
      "loss": 2.0683,
      "step": 500
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944361218799405e-05,
      "loss": 2.1139,
      "step": 501
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.944076758274007e-05,
      "loss": 2.0449,
      "step": 502
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9437915933468648e-05,
      "loss": 2.1037,
      "step": 503
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.943505724230748e-05,
      "loss": 1.959,
      "step": 504
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9432191511389524e-05,
      "loss": 2.2189,
      "step": 505
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.942931874285297e-05,
      "loss": 2.1891,
      "step": 506
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9426438938841278e-05,
      "loss": 2.0744,
      "step": 507
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9423552101503143e-05,
      "loss": 2.0531,
      "step": 508
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.942065823299252e-05,
      "loss": 2.0824,
      "step": 509
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9417757335468596e-05,
      "loss": 2.13,
      "step": 510
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.941484941109582e-05,
      "loss": 2.0355,
      "step": 511
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.941193446204387e-05,
      "loss": 2.073,
      "step": 512
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.940901249048767e-05,
      "loss": 2.0244,
      "step": 513
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9406083498607385e-05,
      "loss": 2.0352,
      "step": 514
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9403147488588414e-05,
      "loss": 2.0722,
      "step": 515
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9400204462621398e-05,
      "loss": 2.1696,
      "step": 516
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9397254422902208e-05,
      "loss": 2.2138,
      "step": 517
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9394297371631955e-05,
      "loss": 1.9755,
      "step": 518
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9391333311016968e-05,
      "loss": 2.1022,
      "step": 519
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9388362243268823e-05,
      "loss": 2.1782,
      "step": 520
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.938538417060431e-05,
      "loss": 2.2433,
      "step": 521
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9382399095245453e-05,
      "loss": 2.1096,
      "step": 522
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.93794070194195e-05,
      "loss": 2.1201,
      "step": 523
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9376407945358923e-05,
      "loss": 2.2764,
      "step": 524
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9373401875301407e-05,
      "loss": 1.987,
      "step": 525
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9370388811489873e-05,
      "loss": 2.1447,
      "step": 526
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9367368756172444e-05,
      "loss": 1.8911,
      "step": 527
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.936434171160247e-05,
      "loss": 2.0658,
      "step": 528
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9361307680038517e-05,
      "loss": 2.208,
      "step": 529
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.935826666374435e-05,
      "loss": 2.29,
      "step": 530
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9355218664988958e-05,
      "loss": 2.0086,
      "step": 531
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9352163686046546e-05,
      "loss": 1.946,
      "step": 532
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9349101729196505e-05,
      "loss": 2.2165,
      "step": 533
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9346032796723454e-05,
      "loss": 2.087,
      "step": 534
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.934295689091721e-05,
      "loss": 2.1251,
      "step": 535
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9339874014072785e-05,
      "loss": 2.0044,
      "step": 536
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.9336784168490395e-05,
      "loss": 2.0714,
      "step": 537
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.933368735647547e-05,
|
"loss": 2.1978, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9330583580338624e-05, |
|
"loss": 2.1544, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9327472842395666e-05, |
|
"loss": 1.9759, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9324355144967606e-05, |
|
"loss": 2.1178, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9321230490380644e-05, |
|
"loss": 2.055, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.9318098880966173e-05, |
|
"loss": 2.026, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9314960319060768e-05, |
|
"loss": 1.9408, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.93118148070062e-05, |
|
"loss": 2.1526, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9308662347149423e-05, |
|
"loss": 2.0109, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9305502941842574e-05, |
|
"loss": 2.123, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.930233659344297e-05, |
|
"loss": 2.0683, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.929916330431312e-05, |
|
"loss": 2.0783, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9295983076820687e-05, |
|
"loss": 2.179, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9292795913338543e-05, |
|
"loss": 2.1003, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9289601816244708e-05, |
|
"loss": 2.0289, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.928640078792239e-05, |
|
"loss": 2.1366, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.928319283075996e-05, |
|
"loss": 2.0295, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.927997794715097e-05, |
|
"loss": 2.0243, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.9276756139494134e-05, |
|
"loss": 2.2167, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9273527410193325e-05, |
|
"loss": 2.1146, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9270291761657592e-05, |
|
"loss": 2.0717, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9267049196301137e-05, |
|
"loss": 1.9456, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9263799716543335e-05, |
|
"loss": 2.0217, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9260543324808706e-05, |
|
"loss": 1.9718, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9257280023526938e-05, |
|
"loss": 2.0528, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9254009815132867e-05, |
|
"loss": 2.1248, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9250732702066487e-05, |
|
"loss": 1.9551, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9247448686772944e-05, |
|
"loss": 2.0266, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9244157771702533e-05, |
|
"loss": 2.1102, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.924085995931069e-05, |
|
"loss": 2.0152, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.9237555252058015e-05, |
|
"loss": 2.1487, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.923424365241023e-05, |
|
"loss": 2.0398, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9230925162838223e-05, |
|
"loss": 2.1375, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9227599785817997e-05, |
|
"loss": 2.0095, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.922426752383072e-05, |
|
"loss": 2.1953, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9220928379362673e-05, |
|
"loss": 2.0316, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9217582354905295e-05, |
|
"loss": 2.0101, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.921422945295514e-05, |
|
"loss": 2.1603, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9210869676013906e-05, |
|
"loss": 2.1103, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.920750302658841e-05, |
|
"loss": 2.1324, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9204129507190604e-05, |
|
"loss": 2.3168, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.920074912033757e-05, |
|
"loss": 2.0659, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9197361868551498e-05, |
|
"loss": 1.9578, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.9193967754359715e-05, |
|
"loss": 1.9142, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.919056678029466e-05, |
|
"loss": 2.1447, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9187158948893904e-05, |
|
"loss": 2.0808, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9183744262700114e-05, |
|
"loss": 2.2585, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.918032272426108e-05, |
|
"loss": 2.1271, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9176894336129717e-05, |
|
"loss": 2.0481, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9173459100864033e-05, |
|
"loss": 2.0968, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9170017021027153e-05, |
|
"loss": 2.0043, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9166568099187307e-05, |
|
"loss": 2.0674, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.916311233791783e-05, |
|
"loss": 2.0377, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9159649739797163e-05, |
|
"loss": 2.0513, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9156180307408846e-05, |
|
"loss": 1.9481, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.9152704043341522e-05, |
|
"loss": 2.0555, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9149220950188917e-05, |
|
"loss": 2.0321, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9145731030549873e-05, |
|
"loss": 2.0564, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9142234287028313e-05, |
|
"loss": 2.0679, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.913873072223325e-05, |
|
"loss": 2.0356, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9135220338778797e-05, |
|
"loss": 2.0611, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9131703139284143e-05, |
|
"loss": 2.0958, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.912817912637357e-05, |
|
"loss": 2.0945, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9124648302676437e-05, |
|
"loss": 2.1156, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9121110670827193e-05, |
|
"loss": 2.1326, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9117566233465362e-05, |
|
"loss": 2.0849, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9114014993235552e-05, |
|
"loss": 2.0759, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9110456952787432e-05, |
|
"loss": 1.9964, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.9106892114775763e-05, |
|
"loss": 2.0984, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9103320481860362e-05, |
|
"loss": 2.1734, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9099742056706123e-05, |
|
"loss": 2.1178, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9096156841983013e-05, |
|
"loss": 2.0401, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.909256484036606e-05, |
|
"loss": 2.1282, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.908896605453535e-05, |
|
"loss": 2.0475, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9085360487176037e-05, |
|
"loss": 1.8798, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.908174814097834e-05, |
|
"loss": 2.149, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.907812901863753e-05, |
|
"loss": 1.9444, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9074503122853923e-05, |
|
"loss": 2.2001, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9070870456332914e-05, |
|
"loss": 2.1119, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.906723102178493e-05, |
|
"loss": 2.0627, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.9063584821925452e-05, |
|
"loss": 2.0413, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9059931859475013e-05, |
|
"loss": 2.0902, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9056272137159187e-05, |
|
"loss": 2.1869, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9052605657708596e-05, |
|
"loss": 2.0562, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9048932423858903e-05, |
|
"loss": 1.9537, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9045252438350803e-05, |
|
"loss": 2.0217, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.904156570393004e-05, |
|
"loss": 2.0087, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9037872223347385e-05, |
|
"loss": 1.9944, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9034171999358655e-05, |
|
"loss": 2.1688, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9030465034724676e-05, |
|
"loss": 2.1464, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9026751332211324e-05, |
|
"loss": 2.0621, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9023030894589496e-05, |
|
"loss": 2.0693, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.901930372463511e-05, |
|
"loss": 2.1817, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9015569825129112e-05, |
|
"loss": 2.0721, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9011829198857467e-05, |
|
"loss": 2.0774, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.900808184861116e-05, |
|
"loss": 2.078, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9004327777186195e-05, |
|
"loss": 2.04, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.9000566987383582e-05, |
|
"loss": 2.0963, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8996799482009355e-05, |
|
"loss": 2.0912, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8993025263874552e-05, |
|
"loss": 2.1518, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8989244335795222e-05, |
|
"loss": 2.1706, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.898545670059242e-05, |
|
"loss": 1.9684, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8981662361092206e-05, |
|
"loss": 2.215, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.897786132012564e-05, |
|
"loss": 2.028, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.8974053580528786e-05, |
|
"loss": 2.1806, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.89702391451427e-05, |
|
"loss": 2.1355, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8966418016813443e-05, |
|
"loss": 2.1247, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8962590198392057e-05, |
|
"loss": 1.9697, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.895875569273459e-05, |
|
"loss": 2.0094, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.895491450270207e-05, |
|
"loss": 1.9919, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.895106663116051e-05, |
|
"loss": 1.9849, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.894721208098092e-05, |
|
"loss": 1.9958, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8943350855039288e-05, |
|
"loss": 2.1585, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8939482956216573e-05, |
|
"loss": 2.1583, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.893560838739873e-05, |
|
"loss": 2.1848, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.893172715147667e-05, |
|
"loss": 2.2405, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8927839251346302e-05, |
|
"loss": 2.1478, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.8923944689908492e-05, |
|
"loss": 2.1029, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.892004347006908e-05, |
|
"loss": 2.0691, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.891613559473887e-05, |
|
"loss": 2.0549, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.891222106683364e-05, |
|
"loss": 2.039, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.890829988927413e-05, |
|
"loss": 1.9734, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8904372064986033e-05, |
|
"loss": 1.9615, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.890043759690001e-05, |
|
"loss": 1.9322, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.889649648795167e-05, |
|
"loss": 2.0899, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.889254874108159e-05, |
|
"loss": 2.0542, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8888594359235297e-05, |
|
"loss": 2.141, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8884633345363257e-05, |
|
"loss": 2.045, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8880665702420894e-05, |
|
"loss": 2.094, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.887669143336858e-05, |
|
"loss": 1.9949, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.8872710541171614e-05, |
|
"loss": 2.2024, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8868723028800263e-05, |
|
"loss": 1.983, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.886472889922972e-05, |
|
"loss": 2.1915, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8860728155440108e-05, |
|
"loss": 1.9994, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8856720800416496e-05, |
|
"loss": 2.1001, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.885270683714888e-05, |
|
"loss": 2.132, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8848686268632193e-05, |
|
"loss": 2.1833, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.884465909786629e-05, |
|
"loss": 2.2095, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.884062532785595e-05, |
|
"loss": 2.0305, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.883658496161089e-05, |
|
"loss": 2.1877, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8832538002145728e-05, |
|
"loss": 2.0565, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8828484452480024e-05, |
|
"loss": 2.0808, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.8824424315638233e-05, |
|
"loss": 2.066, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.882035759464974e-05, |
|
"loss": 2.237, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8816284292548833e-05, |
|
"loss": 2.0739, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8812204412374724e-05, |
|
"loss": 2.1036, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.880811795717152e-05, |
|
"loss": 1.992, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8804024929988234e-05, |
|
"loss": 1.9937, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8799925333878793e-05, |
|
"loss": 2.1947, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8795819171902015e-05, |
|
"loss": 2.1933, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8791706447121623e-05, |
|
"loss": 2.1005, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.878758716260623e-05, |
|
"loss": 2.1701, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8783461321429356e-05, |
|
"loss": 2.1513, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8779328926669397e-05, |
|
"loss": 1.9733, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.8775189981409655e-05, |
|
"loss": 1.9488, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.87710444887383e-05, |
|
"loss": 2.1372, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.876689245174841e-05, |
|
"loss": 2.0654, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.876273387353793e-05, |
|
"loss": 2.0267, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8758568757209687e-05, |
|
"loss": 1.9816, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.875439710587139e-05, |
|
"loss": 2.0952, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8750218922635633e-05, |
|
"loss": 2.0556, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.874603421061986e-05, |
|
"loss": 1.9524, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.874184297294641e-05, |
|
"loss": 2.1978, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8737645212742474e-05, |
|
"loss": 2.1391, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8733440933140127e-05, |
|
"loss": 1.9921, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8729230137276287e-05, |
|
"loss": 2.1958, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.872501282829275e-05, |
|
"loss": 2.2318, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8720789009336165e-05, |
|
"loss": 2.18, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.8716558683558046e-05, |
|
"loss": 2.1911, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8712321854114747e-05, |
|
"loss": 2.1474, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.870807852416749e-05, |
|
"loss": 1.9531, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.870382869688234e-05, |
|
"loss": 2.1122, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8699572375430206e-05, |
|
"loss": 2.0319, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.869530956298685e-05, |
|
"loss": 2.0601, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8691040262732877e-05, |
|
"loss": 2.1528, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8686764477853726e-05, |
|
"loss": 2.105, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8682482211539675e-05, |
|
"loss": 2.0976, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8678193466985853e-05, |
|
"loss": 1.9635, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8673898247392197e-05, |
|
"loss": 2.0271, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.86695965559635e-05, |
|
"loss": 2.1241, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.8665288395909362e-05, |
|
"loss": 2.0422, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8660973770444227e-05, |
|
"loss": 2.1433, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8656652682787356e-05, |
|
"loss": 2.172, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8652325136162836e-05, |
|
"loss": 2.061, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8647991133799558e-05, |
|
"loss": 2.0486, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8643650678931248e-05, |
|
"loss": 2.079, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.863930377479644e-05, |
|
"loss": 2.1625, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8634950424638477e-05, |
|
"loss": 2.0032, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8630590631705514e-05, |
|
"loss": 2.1242, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8626224399250515e-05, |
|
"loss": 2.0079, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.862185173053124e-05, |
|
"loss": 2.1512, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.861747262881027e-05, |
|
"loss": 2.1447, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.861308709735496e-05, |
|
"loss": 2.1407, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8608695139437486e-05, |
|
"loss": 2.1484, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8604296758334804e-05, |
|
"loss": 1.9151, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.859989195732867e-05, |
|
"loss": 2.2456, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.859548073970563e-05, |
|
"loss": 2.0356, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.859106310875701e-05, |
|
"loss": 2.1163, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8586639067778925e-05, |
|
"loss": 2.0882, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8582208620072283e-05, |
|
"loss": 1.9685, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8577771768942755e-05, |
|
"loss": 2.1537, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8573328517700805e-05, |
|
"loss": 2.2408, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.856887886966166e-05, |
|
"loss": 2.1632, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8564422828145327e-05, |
|
"loss": 2.1166, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.855996039647658e-05, |
|
"loss": 1.992, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8555491577984967e-05, |
|
"loss": 1.9231, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8551016376004796e-05, |
|
"loss": 2.0601, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8546534793875134e-05, |
|
"loss": 2.1245, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8542046834939816e-05, |
|
"loss": 2.079, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8537552502547435e-05, |
|
"loss": 2.038, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8533051800051333e-05, |
|
"loss": 2.0449, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.852854473080961e-05, |
|
"loss": 2.0672, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8524031298185114e-05, |
|
"loss": 1.9162, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.851951150554544e-05, |
|
"loss": 1.9797, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.8514985356262932e-05, |
|
"loss": 2.0481, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.851045285371468e-05, |
|
"loss": 2.0876, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.85059140012825e-05, |
|
"loss": 1.9302, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.850136880235296e-05, |
|
"loss": 2.0301, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.849681726031736e-05, |
|
"loss": 2.0945, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8492259378571727e-05, |
|
"loss": 2.0323, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8487695160516825e-05, |
|
"loss": 1.9973, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8483124609558145e-05, |
|
"loss": 2.0284, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8478547729105898e-05, |
|
"loss": 2.0892, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.847396452257502e-05, |
|
"loss": 2.037, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8469374993385175e-05, |
|
"loss": 1.9884, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8464779144960726e-05, |
|
"loss": 1.9746, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8460176980730777e-05, |
|
"loss": 2.1478, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8455568504129117e-05, |
|
"loss": 2.0941, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.8450953718594263e-05, |
|
"loss": 2.2028, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.844633262756943e-05, |
|
"loss": 2.0477, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.844170523450255e-05, |
|
"loss": 2.0261, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8437071542846242e-05, |
|
"loss": 2.2724, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8432431556057832e-05, |
|
"loss": 2.2646, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8427785277599347e-05, |
|
"loss": 2.0803, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8423132710937498e-05, |
|
"loss": 2.0853, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8418473859543694e-05, |
|
"loss": 2.101, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8413808726894038e-05, |
|
"loss": 2.1327, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8409137316469308e-05, |
|
"loss": 2.1332, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8404459631754978e-05, |
|
"loss": 2.0031, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8399775676241192e-05, |
|
"loss": 2.0672, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.839508545342278e-05, |
|
"loss": 2.0958, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8390388966799252e-05, |
|
"loss": 2.0105, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.838568621987478e-05, |
|
"loss": 1.9901, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.8380977216158217e-05, |
|
"loss": 2.1736, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8376261959163076e-05, |
|
"loss": 1.9927, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.837154045240755e-05, |
|
"loss": 2.1751, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8366812699414476e-05, |
|
"loss": 1.9944, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8362078703711366e-05, |
|
"loss": 1.9317, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.835733846883038e-05, |
|
"loss": 1.9408, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8352591998308346e-05, |
|
"loss": 1.9305, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.834783929568673e-05, |
|
"loss": 2.0034, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.834308036451166e-05, |
|
"loss": 1.898, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8338315208333904e-05, |
|
"loss": 2.2036, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.833354383070887e-05, |
|
"loss": 2.2859, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8328766235196628e-05, |
|
"loss": 2.0754, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.8323982425361864e-05, |
|
"loss": 2.1682, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8319192404773912e-05, |
|
"loss": 1.899, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.831439617700674e-05, |
|
"loss": 2.142, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8309593745638945e-05, |
|
"loss": 2.0643, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8304785114253757e-05, |
|
"loss": 2.0347, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8299970286439023e-05, |
|
"loss": 2.0681, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8295149265787224e-05, |
|
"loss": 2.2443, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8290322055895454e-05, |
|
"loss": 1.9751, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.828548866036543e-05, |
|
"loss": 2.1085, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8280649082803478e-05, |
|
"loss": 2.0898, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8275803326820545e-05, |
|
"loss": 2.0397, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.827095139603218e-05, |
|
"loss": 2.0453, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.8266093294058542e-05, |
|
"loss": 2.1116, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.82612290245244e-05, |
|
"loss": 2.0554, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8256358591059115e-05, |
|
"loss": 2.0964, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8251481997296654e-05, |
|
"loss": 2.177, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.824659924687558e-05, |
|
"loss": 2.2019, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8241710343439042e-05, |
|
"loss": 2.0446, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8236815290634794e-05, |
|
"loss": 2.2175, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8231914092115164e-05, |
|
"loss": 2.068, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.822700675153707e-05, |
|
"loss": 2.2024, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8222093272562023e-05, |
|
"loss": 2.1152, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.82171736588561e-05, |
|
"loss": 2.1658, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8212247914089954e-05, |
|
"loss": 2.0257, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8207316041938835e-05, |
|
"loss": 2.1251, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.8202378046082533e-05, |
|
"loss": 2.0856, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8197433930205433e-05, |
|
"loss": 2.0725, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8192483697996473e-05, |
|
"loss": 2.1683, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.818752735314916e-05, |
|
"loss": 2.0949, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8182564899361558e-05, |
|
"loss": 1.9965, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.817759634033629e-05, |
|
"loss": 2.0271, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8172621679780532e-05, |
|
"loss": 1.9172, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8167640921406026e-05, |
|
"loss": 2.1983, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8162654068929043e-05, |
|
"loss": 2.3338, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.815766112607042e-05, |
|
"loss": 2.0754, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.815266209655552e-05, |
|
"loss": 2.2493, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8147656984114268e-05, |
|
"loss": 2.1235, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.814264579248111e-05, |
|
"loss": 2.0683, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.8137628525395032e-05, |
|
"loss": 2.1036, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.813260518659956e-05, |
|
"loss": 2.0402, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8127575779842744e-05, |
|
"loss": 2.0176, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8122540308877165e-05, |
|
"loss": 2.088, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8117498777459926e-05, |
|
"loss": 2.1039, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8112451189352653e-05, |
|
"loss": 1.9656, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8107397548321487e-05, |
|
"loss": 2.0934, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8102337858137094e-05, |
|
"loss": 2.0962, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8097272122574653e-05, |
|
"loss": 2.1393, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.809220034541384e-05, |
|
"loss": 2.0145, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8087122530438847e-05, |
|
"loss": 2.1078, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.808203868143838e-05, |
|
"loss": 2.0474, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.8076948802205637e-05, |
|
"loss": 2.0055, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8071852896538314e-05, |
|
"loss": 2.0778, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.806675096823861e-05, |
|
"loss": 2.0673, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8061643021113215e-05, |
|
"loss": 1.96, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.805652905897331e-05, |
|
"loss": 2.0562, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8051409085634556e-05, |
|
"loss": 2.0278, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8046283104917116e-05, |
|
"loss": 2.1826, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8041151120645622e-05, |
|
"loss": 2.1399, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8036013136649186e-05, |
|
"loss": 2.2628, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8030869156761403e-05, |
|
"loss": 1.994, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.802571918482034e-05, |
|
"loss": 2.1652, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8020563224668534e-05, |
|
"loss": 2.0373, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8015401280152986e-05, |
|
"loss": 2.0345, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.8010233355125163e-05, |
|
"loss": 2.0333, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.8005059453441002e-05, |
|
"loss": 2.0996, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.799987957896089e-05, |
|
"loss": 2.1884, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.799469373554968e-05, |
|
"loss": 2.0926, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7989501927076663e-05, |
|
"loss": 2.0505, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7984304157415602e-05, |
|
"loss": 1.956, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.797910043044469e-05, |
|
"loss": 1.9716, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7973890750046574e-05, |
|
"loss": 2.0674, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7968675120108338e-05, |
|
"loss": 2.0069, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.796345354452151e-05, |
|
"loss": 2.0547, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.795822602718205e-05, |
|
"loss": 2.1247, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7952992571990352e-05, |
|
"loss": 2.1307, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.7947753182851248e-05, |
|
"loss": 2.1847, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.794250786367398e-05, |
|
"loss": 1.9865, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.793725661837223e-05, |
|
"loss": 2.0754, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7931999450864106e-05, |
|
"loss": 2.0803, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7926736365072116e-05, |
|
"loss": 1.9959, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7921467364923194e-05, |
|
"loss": 2.1058, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7916192454348687e-05, |
|
"loss": 2.1385, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.791091163728436e-05, |
|
"loss": 1.9837, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7905624917670365e-05, |
|
"loss": 2.0095, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7900332299451276e-05, |
|
"loss": 2.1201, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7895033786576056e-05, |
|
"loss": 2.0723, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.788972938299808e-05, |
|
"loss": 2.0061, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7884419092675104e-05, |
|
"loss": 1.993, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.7879102919569283e-05, |
|
"loss": 2.0725, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7873780867647165e-05, |
|
"loss": 2.148, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7868452940879673e-05, |
|
"loss": 1.9938, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7863119143242125e-05, |
|
"loss": 2.1881, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7857779478714215e-05, |
|
"loss": 1.9317, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7852433951280012e-05, |
|
"loss": 2.0024, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7847082564927958e-05, |
|
"loss": 1.9247, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7841725323650876e-05, |
|
"loss": 2.0414, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7836362231445953e-05, |
|
"loss": 1.9478, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7830993292314736e-05, |
|
"loss": 2.126, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7825618510263144e-05, |
|
"loss": 2.0988, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.782023788930144e-05, |
|
"loss": 1.9235, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.7814851433444263e-05, |
|
"loss": 1.9372, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7809459146710596e-05, |
|
"loss": 1.9239, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7804061033123767e-05, |
|
"loss": 1.8896, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7798657096711466e-05, |
|
"loss": 2.1105, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.779324734150571e-05, |
|
"loss": 1.8998, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.778783177154287e-05, |
|
"loss": 2.1331, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7782410390863664e-05, |
|
"loss": 1.9915, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7776983203513113e-05, |
|
"loss": 2.0835, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.777155021354061e-05, |
|
"loss": 1.9979, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7766111424999844e-05, |
|
"loss": 2.1237, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7760666841948857e-05, |
|
"loss": 2.1668, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7755216468449995e-05, |
|
"loss": 1.8196, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.774976030856994e-05, |
|
"loss": 1.9662, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.7744298366379673e-05, |
|
"loss": 1.9102, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.773883064595451e-05, |
|
"loss": 2.1534, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7733357151374062e-05, |
|
"loss": 2.1858, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.772787788672226e-05, |
|
"loss": 2.2984, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7722392856087326e-05, |
|
"loss": 2.0689, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7716902063561798e-05, |
|
"loss": 2.134, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7711405513242513e-05, |
|
"loss": 2.1491, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.770590320923059e-05, |
|
"loss": 2.1084, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7700395155631456e-05, |
|
"loss": 2.07, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.769488135655482e-05, |
|
"loss": 2.1265, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.768936181611468e-05, |
|
"loss": 2.0257, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7683836538429314e-05, |
|
"loss": 2.2208, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.7678305527621292e-05, |
|
"loss": 2.036, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7672768787817444e-05, |
|
"loss": 2.1872, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7667226323148894e-05, |
|
"loss": 2.0627, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.766167813775102e-05, |
|
"loss": 2.0086, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7656124235763487e-05, |
|
"loss": 2.0519, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7650564621330202e-05, |
|
"loss": 2.0584, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7644999298599353e-05, |
|
"loss": 2.1629, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7639428271723385e-05, |
|
"loss": 2.178, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7633851544858988e-05, |
|
"loss": 2.0371, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7628269122167115e-05, |
|
"loss": 2.0742, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7622681007812965e-05, |
|
"loss": 2.0686, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7617087205965987e-05, |
|
"loss": 1.9865, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7611487720799868e-05, |
|
"loss": 2.2099, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.7605882556492536e-05, |
|
"loss": 2.0271, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7600271717226167e-05, |
|
"loss": 2.025, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7594655207187155e-05, |
|
"loss": 2.0317, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.758903303056614e-05, |
|
"loss": 1.9743, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.758340519155798e-05, |
|
"loss": 2.0186, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.757777169436176e-05, |
|
"loss": 2.0269, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7572132543180787e-05, |
|
"loss": 2.0942, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7566487742222594e-05, |
|
"loss": 1.949, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7560837295698915e-05, |
|
"loss": 2.0108, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.7555181207825712e-05, |
|
"loss": 1.9715, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.754951948282314e-05, |
|
"loss": 2.1552, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.754385212491557e-05, |
|
"loss": 2.1164, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.753817913833158e-05, |
|
"loss": 2.1771, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.753250052730394e-05, |
|
"loss": 2.0059, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7526816296069616e-05, |
|
"loss": 2.1509, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.752112644886977e-05, |
|
"loss": 2.1531, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7515430989949754e-05, |
|
"loss": 2.0267, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7509729923559113e-05, |
|
"loss": 2.2791, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.750402325395156e-05, |
|
"loss": 2.0161, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.749831098538501e-05, |
|
"loss": 2.142, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7492593122121534e-05, |
|
"loss": 2.0069, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7486869668427396e-05, |
|
"loss": 2.1469, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7481140628573017e-05, |
|
"loss": 2.0611, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7475406006832996e-05, |
|
"loss": 1.9623, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7469665807486088e-05, |
|
"loss": 1.9778, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.7463920034815214e-05, |
|
"loss": 2.1141, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7458168693107465e-05, |
|
"loss": 2.232, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7452411786654063e-05, |
|
"loss": 1.8763, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.74466493197504e-05, |
|
"loss": 1.9351, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.744088129669601e-05, |
|
"loss": 2.0438, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7435107721794577e-05, |
|
"loss": 1.9983, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7429328599353926e-05, |
|
"loss": 2.033, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.742354393368601e-05, |
|
"loss": 2.1257, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.741775372910694e-05, |
|
"loss": 2.054, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7411957989936944e-05, |
|
"loss": 2.0819, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7406156720500376e-05, |
|
"loss": 2.0178, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.7400349925125733e-05, |
|
"loss": 2.1249, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.739453760814562e-05, |
|
"loss": 2.015, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.738871977389677e-05, |
|
"loss": 2.0478, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7382896426720025e-05, |
|
"loss": 1.927, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7377067570960352e-05, |
|
"loss": 2.1537, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7371233210966814e-05, |
|
"loss": 2.0347, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7365393351092598e-05, |
|
"loss": 2.0972, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7359547995694975e-05, |
|
"loss": 2.1136, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7353697149135327e-05, |
|
"loss": 2.0218, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.734784081577914e-05, |
|
"loss": 2.0246, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7341978999995975e-05, |
|
"loss": 1.9411, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7336111706159508e-05, |
|
"loss": 2.0379, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7330238938647475e-05, |
|
"loss": 1.9772, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.732436070184172e-05, |
|
"loss": 2.0398, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.7318477000128153e-05, |
|
"loss": 2.1369, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7312587837896767e-05, |
|
"loss": 1.9949, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7306693219541632e-05, |
|
"loss": 2.1325, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7300793149460884e-05, |
|
"loss": 2.1197, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7294887632056724e-05, |
|
"loss": 1.9793, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7288976671735428e-05, |
|
"loss": 1.9091, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7283060272907322e-05, |
|
"loss": 2.0585, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7277138439986797e-05, |
|
"loss": 2.0667, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7271211177392296e-05, |
|
"loss": 2.1258, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.726527848954631e-05, |
|
"loss": 2.0552, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7259340380875384e-05, |
|
"loss": 2.2264, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7253396855810108e-05, |
|
"loss": 2.0313, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.7247447918785104e-05, |
|
"loss": 2.1198, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7241493574239043e-05, |
|
"loss": 2.1439, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.723553382661462e-05, |
|
"loss": 2.1, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.722956868035858e-05, |
|
"loss": 2.0662, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7223598139921667e-05, |
|
"loss": 2.135, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7217622209758676e-05, |
|
"loss": 2.0709, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7211640894328413e-05, |
|
"loss": 2.1558, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.72056541980937e-05, |
|
"loss": 1.9143, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7199662125521375e-05, |
|
"loss": 2.0896, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7193664681082296e-05, |
|
"loss": 2.0557, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7187661869251314e-05, |
|
"loss": 2.009, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7181653694507297e-05, |
|
"loss": 2.0489, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.717564016133311e-05, |
|
"loss": 2.1122, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.7169621274215614e-05, |
|
"loss": 2.047, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7163597037645666e-05, |
|
"loss": 1.9579, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7157567456118124e-05, |
|
"loss": 2.0689, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.715153253413181e-05, |
|
"loss": 1.9044, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7145492276189565e-05, |
|
"loss": 2.0589, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7139446686798176e-05, |
|
"loss": 1.98, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.713339577046843e-05, |
|
"loss": 1.9663, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7127339531715085e-05, |
|
"loss": 2.1015, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7121277975056863e-05, |
|
"loss": 2.1151, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7115211105016465e-05, |
|
"loss": 2.0571, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7109138926120548e-05, |
|
"loss": 1.9339, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7103061442899727e-05, |
|
"loss": 2.0227, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.709697865988859e-05, |
|
"loss": 2.0826, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.709089058162566e-05, |
|
"loss": 2.139, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7084797212653427e-05, |
|
"loss": 2.0555, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.707869855751832e-05, |
|
"loss": 1.9024, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.707259462077071e-05, |
|
"loss": 1.9507, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7066485406964917e-05, |
|
"loss": 2.1505, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7060370920659194e-05, |
|
"loss": 1.9748, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7054251166415726e-05, |
|
"loss": 2.1809, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7048126148800634e-05, |
|
"loss": 1.9567, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.704199587238396e-05, |
|
"loss": 2.0266, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7035860341739676e-05, |
|
"loss": 1.972, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7029719561445665e-05, |
|
"loss": 2.093, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7023573536083734e-05, |
|
"loss": 2.0699, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7017422270239608e-05, |
|
"loss": 2.0017, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7011265768502912e-05, |
|
"loss": 2.0849, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.7005104035467182e-05, |
|
"loss": 1.9899, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.699893707572986e-05, |
|
"loss": 2.0793, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6992764893892283e-05, |
|
"loss": 2.0402, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6986587494559684e-05, |
|
"loss": 1.8528, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6980404882341192e-05, |
|
"loss": 2.0223, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6974217061849828e-05, |
|
"loss": 2.036, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6968024037702493e-05, |
|
"loss": 2.0401, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6961825814519976e-05, |
|
"loss": 2.181, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.695562239692694e-05, |
|
"loss": 2.1262, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.6949413789551926e-05, |
|
"loss": 2.0758, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.694319999702735e-05, |
|
"loss": 2.1068, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.693698102398949e-05, |
|
"loss": 2.1767, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6930756875078496e-05, |
|
"loss": 2.0053, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6924527554938383e-05, |
|
"loss": 1.9366, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.691829306821701e-05, |
|
"loss": 2.1567, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.69120534195661e-05, |
|
"loss": 2.1365, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6905808613641233e-05, |
|
"loss": 2.1564, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6899558655101832e-05, |
|
"loss": 1.9482, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6893303548611152e-05, |
|
"loss": 2.078, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6887043298836318e-05, |
|
"loss": 1.9897, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.688077791044826e-05, |
|
"loss": 2.0035, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6874507388121767e-05, |
|
"loss": 1.9257, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.686823173653544e-05, |
|
"loss": 1.9665, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.6861950960371728e-05, |
|
"loss": 1.9981, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6855665064316878e-05, |
|
"loss": 1.9285, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6849374053060984e-05, |
|
"loss": 2.1091, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6843077931297935e-05, |
|
"loss": 2.0266, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.683677670372544e-05, |
|
"loss": 2.2318, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6830470375045026e-05, |
|
"loss": 2.0764, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.682415894996201e-05, |
|
"loss": 1.9991, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.681784243318553e-05, |
|
"loss": 2.1128, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.681152082942851e-05, |
|
"loss": 2.0251, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6805194143407672e-05, |
|
"loss": 2.0431, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.679886237984353e-05, |
|
"loss": 2.0584, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.679252554346039e-05, |
|
"loss": 2.0676, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.6786183638986336e-05, |
|
"loss": 2.0422, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6779836671153247e-05, |
|
"loss": 2.1321, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6773484644696764e-05, |
|
"loss": 2.0751, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6767127564356312e-05, |
|
"loss": 2.2097, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6760765434875082e-05, |
|
"loss": 1.9918, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6754398261000038e-05, |
|
"loss": 2.0792, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.67480260474819e-05, |
|
"loss": 1.9113, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.674164879907516e-05, |
|
"loss": 2.0272, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6735266520538046e-05, |
|
"loss": 1.996, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6728879216632567e-05, |
|
"loss": 2.0135, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6722486892124458e-05, |
|
"loss": 1.9634, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6716089551783212e-05, |
|
"loss": 2.0898, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6709687200382057e-05, |
|
"loss": 2.0836, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.6703279842697974e-05, |
|
"loss": 2.0394, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6696867483511657e-05, |
|
"loss": 2.0271, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6690450127607555e-05, |
|
"loss": 2.1467, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6684027779773827e-05, |
|
"loss": 2.1812, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6677600444802365e-05, |
|
"loss": 2.103, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6671168127488785e-05, |
|
"loss": 2.0507, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6664730832632417e-05, |
|
"loss": 1.8555, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.66582885650363e-05, |
|
"loss": 2.0102, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.665184132950719e-05, |
|
"loss": 2.2401, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6645389130855547e-05, |
|
"loss": 1.9668, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6638931973895537e-05, |
|
"loss": 2.1121, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.663246986344502e-05, |
|
"loss": 2.1751, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.6626002804325555e-05, |
|
"loss": 2.0091, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6619530801362396e-05, |
|
"loss": 2.0214, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6613053859384483e-05, |
|
"loss": 1.9509, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.660657198322444e-05, |
|
"loss": 1.9866, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.660008517771857e-05, |
|
"loss": 2.0551, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6593593447706865e-05, |
|
"loss": 1.8956, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6587096798032984e-05, |
|
"loss": 1.9995, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.658059523354425e-05, |
|
"loss": 2.0836, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6574088759091664e-05, |
|
"loss": 2.0789, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6567577379529884e-05, |
|
"loss": 1.8307, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6561061099717235e-05, |
|
"loss": 1.9807, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6554539924515688e-05, |
|
"loss": 2.0764, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6548013858790873e-05, |
|
"loss": 2.1244, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.6541482907412073e-05, |
|
"loss": 2.0074, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6534947075252205e-05, |
|
"loss": 2.0529, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6528406367187836e-05, |
|
"loss": 2.1062, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6521860788099165e-05, |
|
"loss": 2.0786, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6515310342870038e-05, |
|
"loss": 2.2249, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.650875503638792e-05, |
|
"loss": 2.0701, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6502194873543902e-05, |
|
"loss": 2.0088, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6495629859232706e-05, |
|
"loss": 2.0509, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6489059998352668e-05, |
|
"loss": 2.0652, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6482485295805748e-05, |
|
"loss": 2.0662, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6475905756497505e-05, |
|
"loss": 1.841, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6469321385337123e-05, |
|
"loss": 2.0366, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.6462732187237377e-05, |
|
"loss": 2.0557, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6456138167114658e-05, |
|
"loss": 1.9588, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6449539329888934e-05, |
|
"loss": 2.0547, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.644293568048379e-05, |
|
"loss": 1.9544, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.643632722382639e-05, |
|
"loss": 2.0879, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6429713964847483e-05, |
|
"loss": 2.0551, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6423095908481405e-05, |
|
"loss": 2.043, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.641647305966607e-05, |
|
"loss": 1.9611, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6409845423342967e-05, |
|
"loss": 2.0304, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6403213004457163e-05, |
|
"loss": 2.0919, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6396575807957288e-05, |
|
"loss": 2.0027, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6389933838795532e-05, |
|
"loss": 1.9958, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.638328710192766e-05, |
|
"loss": 2.0816, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.6376635602312983e-05, |
|
"loss": 2.1248, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6369979344914364e-05, |
|
"loss": 2.1249, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6363318334698224e-05, |
|
"loss": 1.9988, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.635665257663453e-05, |
|
"loss": 2.103, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6349982075696778e-05, |
|
"loss": 2.0667, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6343306836862026e-05, |
|
"loss": 1.9818, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6336626865110846e-05, |
|
"loss": 1.9757, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.632994216542735e-05, |
|
"loss": 2.1121, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6323252742799182e-05, |
|
"loss": 2.1197, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.63165586022175e-05, |
|
"loss": 2.0426, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6309859748676985e-05, |
|
"loss": 2.046, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6303156187175843e-05, |
|
"loss": 2.2339, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.6296447922715782e-05, |
|
"loss": 2.1643, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6289734960302026e-05, |
|
"loss": 1.993, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6283017304943296e-05, |
|
"loss": 1.9325, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6276294961651832e-05, |
|
"loss": 2.1446, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.626956793544335e-05, |
|
"loss": 2.183, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.626283623133707e-05, |
|
"loss": 2.0298, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.625609985435571e-05, |
|
"loss": 2.081, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6249358809525457e-05, |
|
"loss": 2.0124, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6242613101875998e-05, |
|
"loss": 1.9765, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6235862736440488e-05, |
|
"loss": 1.9998, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6229107718255566e-05, |
|
"loss": 2.155, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.622234805236133e-05, |
|
"loss": 2.0082, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.621558374380136e-05, |
|
"loss": 1.9278, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.6208814797622695e-05, |
|
"loss": 2.1352, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6202041218875825e-05, |
|
"loss": 1.9633, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6195263012614705e-05, |
|
"loss": 1.9558, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.618848018389675e-05, |
|
"loss": 2.008, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6181692737782806e-05, |
|
"loss": 2.0697, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6174900679337185e-05, |
|
"loss": 2.1206, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6168104013627618e-05, |
|
"loss": 2.1269, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6161302745725292e-05, |
|
"loss": 1.9201, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.615449688070482e-05, |
|
"loss": 1.9636, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6147686423644243e-05, |
|
"loss": 1.9432, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6140871379625033e-05, |
|
"loss": 2.2027, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6134051753732083e-05, |
|
"loss": 2.0445, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.6127227551053704e-05, |
|
"loss": 2.2301, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.612039877668162e-05, |
|
"loss": 2.1108, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6113565435710975e-05, |
|
"loss": 1.9934, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.61067275332403e-05, |
|
"loss": 1.9785, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.609988507437156e-05, |
|
"loss": 2.0852, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.609303806421009e-05, |
|
"loss": 1.936, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6086186507864635e-05, |
|
"loss": 1.9845, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6079330410447337e-05, |
|
"loss": 1.8619, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6072469777073712e-05, |
|
"loss": 2.0956, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6065604612862674e-05, |
|
"loss": 2.0737, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6058734922936507e-05, |
|
"loss": 2.0087, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.605186071242088e-05, |
|
"loss": 2.0417, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6044981986444827e-05, |
|
"loss": 1.9942, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.603809875014076e-05, |
|
"loss": 2.0002, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.603121100864445e-05, |
|
"loss": 1.8892, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.602431876709503e-05, |
|
"loss": 2.1565, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6017422030634993e-05, |
|
"loss": 2.0255, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6010520804410185e-05, |
|
"loss": 2.1183, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.6003615093569803e-05, |
|
"loss": 2.1372, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5996704903266384e-05, |
|
"loss": 2.078, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5989790238655812e-05, |
|
"loss": 2.0253, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5982871104897315e-05, |
|
"loss": 1.9555, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.597594750715344e-05, |
|
"loss": 1.9375, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5969019450590087e-05, |
|
"loss": 2.2252, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5962086940376465e-05, |
|
"loss": 2.0209, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5955149981685107e-05, |
|
"loss": 2.1932, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5948208579691876e-05, |
|
"loss": 2.0681, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5941262739575937e-05, |
|
"loss": 2.1148, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.593431246651978e-05, |
|
"loss": 2.0945, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.592735776570919e-05, |
|
"loss": 1.9131, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5920398642333265e-05, |
|
"loss": 2.0219, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.59134351015844e-05, |
|
"loss": 2.1576, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.590646714865828e-05, |
|
"loss": 2.0444, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.589949478875389e-05, |
|
"loss": 2.1156, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5892518027073504e-05, |
|
"loss": 1.8617, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.588553686882267e-05, |
|
"loss": 1.9904, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5878551319210228e-05, |
|
"loss": 2.0382, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.5871561383448287e-05, |
|
"loss": 2.0144, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.586456706675223e-05, |
|
"loss": 1.9559, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5857568374340713e-05, |
|
"loss": 2.0797, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5850565311435652e-05, |
|
"loss": 2.1611, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5843557883262224e-05, |
|
"loss": 2.1894, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5836546095048873e-05, |
|
"loss": 2.1399, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5829529952027276e-05, |
|
"loss": 2.0913, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.582250945943238e-05, |
|
"loss": 2.1283, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.581548462250236e-05, |
|
"loss": 1.8905, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.580845544647865e-05, |
|
"loss": 2.036, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5801421936605904e-05, |
|
"loss": 2.0889, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.579438409813203e-05, |
|
"loss": 2.1695, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5787341936308135e-05, |
|
"loss": 1.9981, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5780295456388587e-05, |
|
"loss": 2.167, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5773244663630955e-05, |
|
"loss": 2.008, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5766189563296027e-05, |
|
"loss": 1.9873, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.575913016064781e-05, |
|
"loss": 2.1178, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5752066460953522e-05, |
|
"loss": 2.0543, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5744998469483576e-05, |
|
"loss": 2.1543, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5737926191511607e-05, |
|
"loss": 1.9831, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5730849632314428e-05, |
|
"loss": 2.0396, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5723768797172057e-05, |
|
"loss": 2.0369, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5716683691367704e-05, |
|
"loss": 2.0141, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.570959432018776e-05, |
|
"loss": 1.8861, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5702500688921804e-05, |
|
"loss": 2.009, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5695402802862586e-05, |
|
"loss": 2.0958, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5688300667306034e-05, |
|
"loss": 1.964, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.568119428755125e-05, |
|
"loss": 2.0791, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.56740836689005e-05, |
|
"loss": 2.1173, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5666968816659213e-05, |
|
"loss": 1.8829, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5659849736135978e-05, |
|
"loss": 2.0971, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5652726432642533e-05, |
|
"loss": 2.0022, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5645598911493777e-05, |
|
"loss": 1.8265, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5638467178007744e-05, |
|
"loss": 2.0991, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5631331237505625e-05, |
|
"loss": 1.9832, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5624191095311736e-05, |
|
"loss": 1.9539, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.561704675675354e-05, |
|
"loss": 2.1531, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5609898227161618e-05, |
|
"loss": 2.0157, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.5602745511869693e-05, |
|
"loss": 2.1204, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5595588616214596e-05, |
|
"loss": 1.9266, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.558842754553629e-05, |
|
"loss": 2.0615, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5581262305177848e-05, |
|
"loss": 1.9658, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.557409290048545e-05, |
|
"loss": 2.061, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5566919336808388e-05, |
|
"loss": 2.1229, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.555974161949906e-05, |
|
"loss": 2.0, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5552559753912952e-05, |
|
"loss": 1.966, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.554537374540866e-05, |
|
"loss": 1.5199, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.5538183599347852e-05, |
|
"loss": 1.4886, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.553098932109531e-05, |
|
"loss": 1.4961, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.552379091601887e-05, |
|
"loss": 1.4554, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.551658838948947e-05, |
|
"loss": 1.5205, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.550938174688111e-05, |
|
"loss": 1.3946, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5502170993570863e-05, |
|
"loss": 1.4881, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5494956134938877e-05, |
|
"loss": 1.4212, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5487737176368352e-05, |
|
"loss": 1.4514, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5480514123245557e-05, |
|
"loss": 1.3637, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5473286980959805e-05, |
|
"loss": 1.5361, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5466055754903472e-05, |
|
"loss": 1.4138, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5458820450471976e-05, |
|
"loss": 1.4126, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5451581073063772e-05, |
|
"loss": 1.3707, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.544433762808036e-05, |
|
"loss": 1.4751, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.5437090120926284e-05, |
|
"loss": 1.5566, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.54298385570091e-05, |
|
"loss": 1.5796, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.54225829417394e-05, |
|
"loss": 1.4938, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5415323280530804e-05, |
|
"loss": 1.4589, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.540805957879994e-05, |
|
"loss": 1.4819, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5400791841966466e-05, |
|
"loss": 1.5304, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5393520075453026e-05, |
|
"loss": 1.4266, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5386244284685302e-05, |
|
"loss": 1.5542, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5378964475091952e-05, |
|
"loss": 1.3573, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5371680652104644e-05, |
|
"loss": 1.5648, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5364392821158042e-05, |
|
"loss": 1.2882, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5357100987689798e-05, |
|
"loss": 1.3606, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5349805157140553e-05, |
|
"loss": 1.2781, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5342505334953922e-05, |
|
"loss": 1.4364, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.5335201526576505e-05, |
|
"loss": 1.5174, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.532789373745788e-05, |
|
"loss": 1.4761, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5320581973050587e-05, |
|
"loss": 1.4595, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5313266238810133e-05, |
|
"loss": 1.3712, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5305946540194992e-05, |
|
"loss": 1.2939, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.529862288266659e-05, |
|
"loss": 1.2925, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5291295271689318e-05, |
|
"loss": 1.4234, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.52839637127305e-05, |
|
"loss": 1.4797, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5276628211260424e-05, |
|
"loss": 1.3987, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5269288772752298e-05, |
|
"loss": 1.461, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5261945402682292e-05, |
|
"loss": 1.3517, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.525459810652949e-05, |
|
"loss": 1.5289, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5247246889775915e-05, |
|
"loss": 1.3701, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.5239891757906509e-05, |
|
"loss": 1.4686, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5232532716409148e-05, |
|
"loss": 1.3453, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5225169770774605e-05, |
|
"loss": 1.4581, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5217802926496585e-05, |
|
"loss": 1.4185, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5210432189071691e-05, |
|
"loss": 1.5142, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5203057563999437e-05, |
|
"loss": 1.3549, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.519567905678223e-05, |
|
"loss": 1.4931, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5188296672925378e-05, |
|
"loss": 1.2532, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5180910417937084e-05, |
|
"loss": 1.3916, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5173520297328437e-05, |
|
"loss": 1.2732, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5166126316613409e-05, |
|
"loss": 1.3864, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5158728481308853e-05, |
|
"loss": 1.51, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.5151326796934499e-05, |
|
"loss": 1.4698, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 1.514392126901295e-05, |
|
"loss": 1.4337, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.513651190306967e-05, |
|
"loss": 1.3157, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5129098704632994e-05, |
|
"loss": 1.423, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5121681679234112e-05, |
|
"loss": 1.4064, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.511426083240708e-05, |
|
"loss": 1.4638, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5106836169688788e-05, |
|
"loss": 1.4592, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5099407696618983e-05, |
|
"loss": 1.5686, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5091975418740257e-05, |
|
"loss": 1.3284, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5084539341598036e-05, |
|
"loss": 1.3091, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5077099470740582e-05, |
|
"loss": 1.3538, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5069655811718988e-05, |
|
"loss": 1.2863, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.5062208370087178e-05, |
|
"loss": 1.5392, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 1.505475715140189e-05, |
|
"loss": 1.3982, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5047302161222684e-05, |
|
"loss": 1.3306, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.503984340511193e-05, |
|
"loss": 1.4184, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.503238088863482e-05, |
|
"loss": 1.4854, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5024914617359341e-05, |
|
"loss": 1.4061, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5017444596856283e-05, |
|
"loss": 1.4181, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.5009970832699234e-05, |
|
"loss": 1.4264, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.500249333046458e-05, |
|
"loss": 1.3223, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.4995012095731487e-05, |
|
"loss": 1.4622, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.498752713408191e-05, |
|
"loss": 1.5162, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.4980038451100589e-05, |
|
"loss": 1.4249, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.497254605237504e-05, |
|
"loss": 1.4347, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.496504994349554e-05, |
|
"loss": 1.3395, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 1.495755013005515e-05, |
|
"loss": 1.504, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4950046617649684e-05, |
|
"loss": 1.3939, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4942539411877727e-05, |
|
"loss": 1.2609, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4935028518340604e-05, |
|
"loss": 1.2513, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4927513942642402e-05, |
|
"loss": 1.4252, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4919995690389958e-05, |
|
"loss": 1.5399, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4912473767192842e-05, |
|
"loss": 1.4675, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4904948178663374e-05, |
|
"loss": 1.3371, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4897418930416598e-05, |
|
"loss": 1.3351, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4889886028070295e-05, |
|
"loss": 1.3205, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4882349477244975e-05, |
|
"loss": 1.4257, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4874809283563867e-05, |
|
"loss": 1.3122, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 1.4867265452652913e-05, |
|
"loss": 1.5769, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4859717990140776e-05, |
|
"loss": 1.376, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.485216690165883e-05, |
|
"loss": 1.4463, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4844612192841143e-05, |
|
"loss": 1.3795, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4837053869324498e-05, |
|
"loss": 1.3144, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4829491936748368e-05, |
|
"loss": 1.5569, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4821926400754915e-05, |
|
"loss": 1.4278, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4814357266989e-05, |
|
"loss": 1.3739, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4806784541098162e-05, |
|
"loss": 1.5259, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.479920822873262e-05, |
|
"loss": 1.3453, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4791628335545269e-05, |
|
"loss": 1.3719, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4784044867191676e-05, |
|
"loss": 1.4023, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4776457829330078e-05, |
|
"loss": 1.3358, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 1.4768867227621375e-05, |
|
"loss": 1.3767, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.476127306772912e-05, |
|
"loss": 1.7132, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4753675355319527e-05, |
|
"loss": 1.5239, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4746074096061463e-05, |
|
"loss": 1.3091, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4738469295626435e-05, |
|
"loss": 1.5133, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.473086095968859e-05, |
|
"loss": 1.3811, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4723249093924725e-05, |
|
"loss": 1.3599, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.471563370401426e-05, |
|
"loss": 1.4786, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4708014795639249e-05, |
|
"loss": 1.4444, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.470039237448437e-05, |
|
"loss": 1.4369, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4692766446236914e-05, |
|
"loss": 1.4035, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4685137016586808e-05, |
|
"loss": 1.4515, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 1.4677504091226576e-05, |
|
"loss": 1.3604, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.466986767585135e-05, |
|
"loss": 1.4794, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4662227776158877e-05, |
|
"loss": 1.2529, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4654584397849496e-05, |
|
"loss": 1.3584, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4646937546626141e-05, |
|
"loss": 1.3986, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4639287228194339e-05, |
|
"loss": 1.3621, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4631633448262202e-05, |
|
"loss": 1.4718, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.462397621254043e-05, |
|
"loss": 1.4467, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4616315526742296e-05, |
|
"loss": 1.4251, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4608651396583649e-05, |
|
"loss": 1.3707, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.460098382778291e-05, |
|
"loss": 1.4508, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4593312826061063e-05, |
|
"loss": 1.2687, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.4585638397141657e-05, |
|
"loss": 1.4205, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 1.457796054675079e-05, |
|
"loss": 1.4804, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4570279280617119e-05, |
|
"loss": 1.6334, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4562594604471851e-05, |
|
"loss": 1.3818, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.455490652404874e-05, |
|
"loss": 1.3645, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4547215045084065e-05, |
|
"loss": 1.3384, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4539520173316654e-05, |
|
"loss": 1.3147, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4531821914487869e-05, |
|
"loss": 1.6073, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4524120274341588e-05, |
|
"loss": 1.4234, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.451641525862422e-05, |
|
"loss": 1.5198, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4508706873084691e-05, |
|
"loss": 1.4815, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4500995123474436e-05, |
|
"loss": 1.5514, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4493280015547406e-05, |
|
"loss": 1.3883, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 1.4485561555060059e-05, |
|
"loss": 1.4709, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4477839747771347e-05, |
|
"loss": 1.4957, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4470114599442729e-05, |
|
"loss": 1.4036, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4462386115838145e-05, |
|
"loss": 1.4091, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4454654302724036e-05, |
|
"loss": 1.3676, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.444691916586932e-05, |
|
"loss": 1.4983, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4439180711045395e-05, |
|
"loss": 1.4933, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4431438944026135e-05, |
|
"loss": 1.4883, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4423693870587886e-05, |
|
"loss": 1.5289, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4415945496509465e-05, |
|
"loss": 1.297, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4408193827572143e-05, |
|
"loss": 1.2833, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4400438869559659e-05, |
|
"loss": 1.4018, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.43926806282582e-05, |
|
"loss": 1.4689, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 1.4384919109456402e-05, |
|
"loss": 1.356, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.437715431894535e-05, |
|
"loss": 1.4919, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4369386262518568e-05, |
|
"loss": 1.3713, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4361614945972018e-05, |
|
"loss": 1.3646, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4353840375104093e-05, |
|
"loss": 1.2597, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4346062555715619e-05, |
|
"loss": 1.2797, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4338281493609835e-05, |
|
"loss": 1.4289, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4330497194592407e-05, |
|
"loss": 1.5295, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4322709664471423e-05, |
|
"loss": 1.5427, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4314918909057372e-05, |
|
"loss": 1.5053, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4307124934163149e-05, |
|
"loss": 1.5216, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.429932774560405e-05, |
|
"loss": 1.3527, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.429152734919778e-05, |
|
"loss": 1.2904, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4283723750764429e-05, |
|
"loss": 1.5267, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4275916956126475e-05, |
|
"loss": 1.4632, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4268106971108781e-05, |
|
"loss": 1.3546, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4260293801538598e-05, |
|
"loss": 1.4501, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4252477453245546e-05, |
|
"loss": 1.3872, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4244657932061614e-05, |
|
"loss": 1.2606, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4236835243821168e-05, |
|
"loss": 1.4879, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4229009394360929e-05, |
|
"loss": 1.3569, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4221180389519984e-05, |
|
"loss": 1.479, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4213348235139761e-05, |
|
"loss": 1.4363, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4205512937064055e-05, |
|
"loss": 1.4399, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4197674501138993e-05, |
|
"loss": 1.467, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.418983293321305e-05, |
|
"loss": 1.3479, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4181988239137038e-05, |
|
"loss": 1.4517, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.41741404247641e-05, |
|
"loss": 1.3898, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4166289495949705e-05, |
|
"loss": 1.4365, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.415843545855165e-05, |
|
"loss": 1.4992, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4150578318430044e-05, |
|
"loss": 1.5308, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4142718081447324e-05, |
|
"loss": 1.54, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4134854753468226e-05, |
|
"loss": 1.3933, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4126988340359796e-05, |
|
"loss": 1.49, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.411911884799138e-05, |
|
"loss": 1.4395, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4111246282234626e-05, |
|
"loss": 1.5619, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4103370648963475e-05, |
|
"loss": 1.5005, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.409549195405415e-05, |
|
"loss": 1.4743, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4087610203385166e-05, |
|
"loss": 1.517, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4079725402837315e-05, |
|
"loss": 1.5285, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4071837558293664e-05, |
|
"loss": 1.4585, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4063946675639549e-05, |
|
"loss": 1.4732, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4056052760762577e-05, |
|
"loss": 1.3741, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4048155819552617e-05, |
|
"loss": 1.3908, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4040255857901797e-05, |
|
"loss": 1.3973, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.403235288170449e-05, |
|
"loss": 1.3177, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4024446896857331e-05, |
|
"loss": 1.2593, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4016537909259191e-05, |
|
"loss": 1.4716, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4008625924811185e-05, |
|
"loss": 1.4817, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4000710949416663e-05, |
|
"loss": 1.4242, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.3992792988981206e-05, |
|
"loss": 1.3364, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3984872049412623e-05, |
|
"loss": 1.5831, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3976948136620947e-05, |
|
"loss": 1.4545, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3969021256518424e-05, |
|
"loss": 1.5038, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3961091415019525e-05, |
|
"loss": 1.3621, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3953158618040918e-05, |
|
"loss": 1.394, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3945222871501486e-05, |
|
"loss": 1.3988, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3937284181322308e-05, |
|
"loss": 1.412, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3929342553426658e-05, |
|
"loss": 1.5237, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3921397993740003e-05, |
|
"loss": 1.4397, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.391345050819e-05, |
|
"loss": 1.3342, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3905500102706491e-05, |
|
"loss": 1.5929, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3897546783221484e-05, |
|
"loss": 1.3694, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.388959055566918e-05, |
|
"loss": 1.4133, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3881631425985935e-05, |
|
"loss": 1.5339, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3873669400110278e-05, |
|
"loss": 1.5302, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3865704483982894e-05, |
|
"loss": 1.2849, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.385773668354663e-05, |
|
"loss": 1.3443, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3849766004746477e-05, |
|
"loss": 1.354, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3841792453529582e-05, |
|
"loss": 1.439, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3833816035845233e-05, |
|
"loss": 1.3799, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3825836757644853e-05, |
|
"loss": 1.3255, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3817854624882002e-05, |
|
"loss": 1.3855, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3809869643512368e-05, |
|
"loss": 1.4254, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3801881819493772e-05, |
|
"loss": 1.3832, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3793891158786149e-05, |
|
"loss": 1.448, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3785897667351545e-05, |
|
"loss": 1.2874, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.377790135115413e-05, |
|
"loss": 1.369, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3769902216160176e-05, |
|
"loss": 1.5569, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.376190026833806e-05, |
|
"loss": 1.434, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3753895513658249e-05, |
|
"loss": 1.4307, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.374588795809332e-05, |
|
"loss": 1.5067, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3737877607617926e-05, |
|
"loss": 1.4275, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3729864468208817e-05, |
|
"loss": 1.4282, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3721848545844812e-05, |
|
"loss": 1.5925, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3713829846506813e-05, |
|
"loss": 1.3954, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3705808376177791e-05, |
|
"loss": 1.4708, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3697784140842796e-05, |
|
"loss": 1.528, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3689757146488918e-05, |
|
"loss": 1.3875, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3681727399105328e-05, |
|
"loss": 1.3771, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.367369490468324e-05, |
|
"loss": 1.4352, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.366565966921592e-05, |
|
"loss": 1.3498, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.365762169869868e-05, |
|
"loss": 1.3987, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3649580999128871e-05, |
|
"loss": 1.4374, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3641537576505882e-05, |
|
"loss": 1.3938, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3633491436831131e-05, |
|
"loss": 1.4352, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3625442586108066e-05, |
|
"loss": 1.4606, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3617391030342158e-05, |
|
"loss": 1.4801, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3609336775540892e-05, |
|
"loss": 1.5535, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3601279827713771e-05, |
|
"loss": 1.3629, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3593220192872308e-05, |
|
"loss": 1.3985, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.358515787703002e-05, |
|
"loss": 1.4363, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.357709288620242e-05, |
|
"loss": 1.3856, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3569025226407025e-05, |
|
"loss": 1.4375, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3560954903663333e-05, |
|
"loss": 1.4, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3552881923992838e-05, |
|
"loss": 1.5308, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3544806293419016e-05, |
|
"loss": 1.537, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.353672801796731e-05, |
|
"loss": 1.4314, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3528647103665149e-05, |
|
"loss": 1.3809, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.352056355654193e-05, |
|
"loss": 1.5236, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3512477382629008e-05, |
|
"loss": 1.4296, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3504388587959695e-05, |
|
"loss": 1.3762, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3496297178569275e-05, |
|
"loss": 1.5083, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3488203160494963e-05, |
|
"loss": 1.3264, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3480106539775935e-05, |
|
"loss": 1.426, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3472007322453298e-05, |
|
"loss": 1.471, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3463905514570105e-05, |
|
"loss": 1.4312, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.345580112217134e-05, |
|
"loss": 1.4058, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.344769415130391e-05, |
|
"loss": 1.3457, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3439584608016654e-05, |
|
"loss": 1.4246, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3431472498360325e-05, |
|
"loss": 1.3896, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3423357828387589e-05, |
|
"loss": 1.381, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.341524060415303e-05, |
|
"loss": 1.3823, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.340712083171313e-05, |
|
"loss": 1.2307, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3398998517126275e-05, |
|
"loss": 1.5409, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3390873666452753e-05, |
|
"loss": 1.5202, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3382746285754736e-05, |
|
"loss": 1.3458, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3374616381096285e-05, |
|
"loss": 1.4975, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.336648395854335e-05, |
|
"loss": 1.5972, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3358349024163754e-05, |
|
"loss": 1.419, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.33502115840272e-05, |
|
"loss": 1.4685, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3342071644205253e-05, |
|
"loss": 1.5517, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3333929210771346e-05, |
|
"loss": 1.5507, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3325784289800776e-05, |
|
"loss": 1.4057, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3317636887370695e-05, |
|
"loss": 1.4409, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.33094870095601e-05, |
|
"loss": 1.494, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.330133466244984e-05, |
|
"loss": 1.4179, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3293179852122613e-05, |
|
"loss": 1.2741, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3285022584662948e-05, |
|
"loss": 1.3995, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3276862866157199e-05, |
|
"loss": 1.5713, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3268700702693561e-05, |
|
"loss": 1.4212, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3260536100362055e-05, |
|
"loss": 1.3175, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.325236906525451e-05, |
|
"loss": 1.4957, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3244199603464581e-05, |
|
"loss": 1.192, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3236027721087724e-05, |
|
"loss": 1.3951, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3227853424221206e-05, |
|
"loss": 1.5306, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3219676718964103e-05, |
|
"loss": 1.6042, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3211497611417273e-05, |
|
"loss": 1.5467, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3203316107683376e-05, |
|
"loss": 1.3002, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3195132213866865e-05, |
|
"loss": 1.5206, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3186945936073961e-05, |
|
"loss": 1.5053, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.317875728041267e-05, |
|
"loss": 1.4805, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3170566252992782e-05, |
|
"loss": 1.5254, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3162372859925845e-05, |
|
"loss": 1.4268, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3154177107325174e-05, |
|
"loss": 1.4437, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3145979001305849e-05, |
|
"loss": 1.4107, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3137778547984704e-05, |
|
"loss": 1.4194, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3129575753480322e-05, |
|
"loss": 1.3659, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3121370623913033e-05, |
|
"loss": 1.5579, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3113163165404913e-05, |
|
"loss": 1.3553, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3104953384079772e-05, |
|
"loss": 1.4931, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3096741286063162e-05, |
|
"loss": 1.4716, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3088526877482343e-05, |
|
"loss": 1.5263, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.308031016446632e-05, |
|
"loss": 1.3165, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.307209115314581e-05, |
|
"loss": 1.3682, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3063869849653244e-05, |
|
"loss": 1.3977, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3055646260122763e-05, |
|
"loss": 1.3818, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3047420390690211e-05, |
|
"loss": 1.5744, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.303919224749314e-05, |
|
"loss": 1.4652, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3030961836670794e-05, |
|
"loss": 1.4566, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.302272916436411e-05, |
|
"loss": 1.3942, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3014494236715711e-05, |
|
"loss": 1.4514, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3006257059869907e-05, |
|
"loss": 1.3885, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2998017639972678e-05, |
|
"loss": 1.4102, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2989775983171688e-05, |
|
"loss": 1.4057, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.2981532095616262e-05, |
|
"loss": 1.3876, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2973285983457394e-05, |
|
"loss": 1.4518, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2965037652847734e-05, |
|
"loss": 1.4553, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.295678710994159e-05, |
|
"loss": 1.4334, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.294853436089492e-05, |
|
"loss": 1.4896, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2940279411865328e-05, |
|
"loss": 1.4025, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.293202226901206e-05, |
|
"loss": 1.4184, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2923762938495996e-05, |
|
"loss": 1.3865, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2915501426479655e-05, |
|
"loss": 1.4382, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2907237739127174e-05, |
|
"loss": 1.3508, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2898971882604325e-05, |
|
"loss": 1.3573, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2890703863078487e-05, |
|
"loss": 1.4157, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2882433686718656e-05, |
|
"loss": 1.4982, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2874161359695446e-05, |
|
"loss": 1.5905, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2865886888181059e-05, |
|
"loss": 1.2865, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2857610278349315e-05, |
|
"loss": 1.4524, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2849331536375615e-05, |
|
"loss": 1.4695, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2841050668436965e-05, |
|
"loss": 1.2064, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2832767680711941e-05, |
|
"loss": 1.3949, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2824482579380715e-05, |
|
"loss": 1.5902, |
|
"step": 1600 |
|
} |
|
], |
|
"max_steps": 3750, |
|
"num_train_epochs": 3, |
|
"total_flos": 1.62836665667158e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|