{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "global_step": 1434,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 4.5454545454545457e-07, "loss": 1.1698, "step": 1 },
    { "epoch": 0.0, "learning_rate": 9.090909090909091e-07, "loss": 1.1583, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.3636363636363636e-06, "loss": 1.1534, "step": 3 },
    { "epoch": 0.01, "learning_rate": 1.8181818181818183e-06, "loss": 1.1585, "step": 4 },
    { "epoch": 0.01, "learning_rate": 2.2727272727272728e-06, "loss": 1.1666, "step": 5 },
    { "epoch": 0.01, "learning_rate": 2.7272727272727272e-06, "loss": 1.1554, "step": 6 },
    { "epoch": 0.01, "learning_rate": 3.181818181818182e-06, "loss": 1.1134, "step": 7 },
    { "epoch": 0.02, "learning_rate": 3.6363636363636366e-06, "loss": 1.0795, "step": 8 },
    { "epoch": 0.02, "learning_rate": 4.0909090909090915e-06, "loss": 1.0633, "step": 9 },
    { "epoch": 0.02, "learning_rate": 4.5454545454545455e-06, "loss": 1.0393, "step": 10 },
    { "epoch": 0.02, "learning_rate": 5e-06, "loss": 1.081, "step": 11 },
    { "epoch": 0.03, "learning_rate": 5.4545454545454545e-06, "loss": 1.0287, "step": 12 },
    { "epoch": 0.03, "learning_rate": 5.90909090909091e-06, "loss": 1.0222, "step": 13 },
    { "epoch": 0.03, "learning_rate": 6.363636363636364e-06, "loss": 1.0142, "step": 14 },
    { "epoch": 0.03, "learning_rate": 6.818181818181818e-06, "loss": 1.0115, "step": 15 },
    { "epoch": 0.03, "learning_rate": 7.272727272727273e-06, "loss": 1.0224, "step": 16 },
    { "epoch": 0.04, "learning_rate": 7.727272727272727e-06, "loss": 0.9992, "step": 17 },
    { "epoch": 0.04, "learning_rate": 8.181818181818183e-06, "loss": 0.9694, "step": 18 },
    { "epoch": 0.04, "learning_rate": 8.636363636363637e-06, "loss": 0.9774, "step": 19 },
    { "epoch": 0.04, "learning_rate": 9.090909090909091e-06, "loss": 0.973, "step": 20 },
    { "epoch": 0.04, "learning_rate": 9.545454545454547e-06, "loss": 0.9692, "step": 21 },
    { "epoch": 0.05, "learning_rate": 1e-05, "loss": 0.9364, "step": 22 },
    { "epoch": 0.05, "learning_rate": 1.0454545454545455e-05, "loss": 0.9278, "step": 23 },
    { "epoch": 0.05, "learning_rate": 1.0909090909090909e-05, "loss": 0.9261, "step": 24 },
    { "epoch": 0.05, "learning_rate": 1.1363636363636366e-05, "loss": 0.9233, "step": 25 },
    { "epoch": 0.05, "learning_rate": 1.181818181818182e-05, "loss": 0.9133, "step": 26 },
    { "epoch": 0.06, "learning_rate": 1.2272727272727274e-05, "loss": 0.9146, "step": 27 },
    { "epoch": 0.06, "learning_rate": 1.2727272727272728e-05, "loss": 0.9026, "step": 28 },
    { "epoch": 0.06, "learning_rate": 1.3181818181818183e-05, "loss": 0.9147, "step": 29 },
    { "epoch": 0.06, "learning_rate": 1.3636363636363637e-05, "loss": 0.8883, "step": 30 },
    { "epoch": 0.06, "learning_rate": 1.4090909090909092e-05, "loss": 0.8921, "step": 31 },
    { "epoch": 0.07, "learning_rate": 1.4545454545454546e-05, "loss": 0.8754, "step": 32 },
    { "epoch": 0.07, "learning_rate": 1.5000000000000002e-05, "loss": 0.8642, "step": 33 },
    { "epoch": 0.07, "learning_rate": 1.5454545454545454e-05, "loss": 0.8752, "step": 34 },
    { "epoch": 0.07, "learning_rate": 1.590909090909091e-05, "loss": 0.8625, "step": 35 },
    { "epoch": 0.08, "learning_rate": 1.6363636363636366e-05, "loss": 0.8631, "step": 36 },
    { "epoch": 0.08, "learning_rate": 1.681818181818182e-05, "loss": 0.8759, "step": 37 },
    { "epoch": 0.08, "learning_rate": 1.7272727272727274e-05, "loss": 0.8378, "step": 38 },
    { "epoch": 0.08, "learning_rate": 1.772727272727273e-05, "loss": 0.8465, "step": 39 },
    { "epoch": 0.08, "learning_rate": 1.8181818181818182e-05, "loss": 0.861, "step": 40 },
    { "epoch": 0.09, "learning_rate": 1.8636363636363638e-05, "loss": 0.864, "step": 41 },
    { "epoch": 0.09, "learning_rate": 1.9090909090909094e-05, "loss": 0.8496, "step": 42 },
    { "epoch": 0.09, "learning_rate": 1.9545454545454546e-05, "loss": 0.8573, "step": 43 },
    { "epoch": 0.09, "learning_rate": 2e-05, "loss": 0.8323, "step": 44 },
    { "epoch": 0.09, "learning_rate": 1.9999974458878423e-05, "loss": 0.8355, "step": 45 },
    { "epoch": 0.1, "learning_rate": 1.9999897835644166e-05, "loss": 0.836, "step": 46 },
    { "epoch": 0.1, "learning_rate": 1.9999770130688628e-05, "loss": 0.8354, "step": 47 },
    { "epoch": 0.1, "learning_rate": 1.9999591344664163e-05, "loss": 0.8164, "step": 48 },
    { "epoch": 0.1, "learning_rate": 1.9999361478484043e-05, "loss": 0.8409, "step": 49 },
    { "epoch": 0.1, "learning_rate": 1.9999080533322486e-05, "loss": 0.8201, "step": 50 },
    { "epoch": 0.11, "learning_rate": 1.9998748510614615e-05, "loss": 0.8194, "step": 51 },
    { "epoch": 0.11, "learning_rate": 1.9998365412056476e-05, "loss": 0.8229, "step": 52 },
    { "epoch": 0.11, "learning_rate": 1.999793123960503e-05, "loss": 0.8054, "step": 53 },
    { "epoch": 0.11, "learning_rate": 1.999744599547812e-05, "loss": 0.8046, "step": 54 },
    { "epoch": 0.12, "learning_rate": 1.9996909682154487e-05, "loss": 0.8051, "step": 55 },
    { "epoch": 0.12, "learning_rate": 1.999632230237373e-05, "loss": 0.8242, "step": 56 },
    { "epoch": 0.12, "learning_rate": 1.999568385913633e-05, "loss": 0.8094, "step": 57 },
    { "epoch": 0.12, "learning_rate": 1.999499435570359e-05, "loss": 0.8293, "step": 58 },
    { "epoch": 0.12, "learning_rate": 1.999425379559765e-05, "loss": 0.7989, "step": 59 },
    { "epoch": 0.13, "learning_rate": 1.999346218260146e-05, "loss": 0.8044, "step": 60 },
    { "epoch": 0.13, "learning_rate": 1.999261952075875e-05, "loss": 0.8044, "step": 61 },
    { "epoch": 0.13, "learning_rate": 1.999172581437403e-05, "loss": 0.8192, "step": 62 },
    { "epoch": 0.13, "learning_rate": 1.9990781068012554e-05, "loss": 0.8225, "step": 63 },
    { "epoch": 0.13, "learning_rate": 1.9989785286500294e-05, "loss": 0.8117, "step": 64 },
    { "epoch": 0.14, "learning_rate": 1.998873847492393e-05, "loss": 0.8068, "step": 65 },
    { "epoch": 0.14, "learning_rate": 1.9987640638630812e-05, "loss": 0.802, "step": 66 },
    { "epoch": 0.14, "learning_rate": 1.9986491783228925e-05, "loss": 0.8123, "step": 67 },
    { "epoch": 0.14, "learning_rate": 1.998529191458689e-05, "loss": 0.7961, "step": 68 },
    { "epoch": 0.14, "learning_rate": 1.99840410388339e-05, "loss": 0.8135, "step": 69 },
    { "epoch": 0.15, "learning_rate": 1.9982739162359707e-05, "loss": 0.8171, "step": 70 },
    { "epoch": 0.15, "learning_rate": 1.998138629181459e-05, "loss": 0.794, "step": 71 },
    { "epoch": 0.15, "learning_rate": 1.997998243410932e-05, "loss": 0.8025, "step": 72 },
    { "epoch": 0.15, "learning_rate": 1.9978527596415108e-05, "loss": 0.808, "step": 73 },
    { "epoch": 0.15, "learning_rate": 1.99770217861636e-05, "loss": 0.7923, "step": 74 },
    { "epoch": 0.16, "learning_rate": 1.9975465011046807e-05, "loss": 0.8104, "step": 75 },
    { "epoch": 0.16, "learning_rate": 1.9973857279017092e-05, "loss": 0.8118, "step": 76 },
    { "epoch": 0.16, "learning_rate": 1.99721985982871e-05, "loss": 0.8007, "step": 77 },
    { "epoch": 0.16, "learning_rate": 1.9970488977329757e-05, "loss": 0.7899, "step": 78 },
    { "epoch": 0.17, "learning_rate": 1.9968728424878178e-05, "loss": 0.8023, "step": 79 },
    { "epoch": 0.17, "learning_rate": 1.996691694992567e-05, "loss": 0.7825, "step": 80 },
    { "epoch": 0.17, "learning_rate": 1.9965054561725644e-05, "loss": 0.7865, "step": 81 },
    { "epoch": 0.17, "learning_rate": 1.9963141269791606e-05, "loss": 0.7948, "step": 82 },
    { "epoch": 0.17, "learning_rate": 1.9961177083897074e-05, "loss": 0.8035, "step": 83 },
    { "epoch": 0.18, "learning_rate": 1.9959162014075553e-05, "loss": 0.8005, "step": 84 },
    { "epoch": 0.18, "learning_rate": 1.995709607062047e-05, "loss": 0.7789, "step": 85 },
    { "epoch": 0.18, "learning_rate": 1.995497926408513e-05, "loss": 0.7977, "step": 86 },
    { "epoch": 0.18, "learning_rate": 1.9952811605282653e-05, "loss": 0.8016, "step": 87 },
    { "epoch": 0.18, "learning_rate": 1.9950593105285927e-05, "loss": 0.7857, "step": 88 },
    { "epoch": 0.19, "learning_rate": 1.994832377542755e-05, "loss": 0.7965, "step": 89 },
    { "epoch": 0.19, "learning_rate": 1.9946003627299766e-05, "loss": 0.7786, "step": 90 },
    { "epoch": 0.19, "learning_rate": 1.9943632672754408e-05, "loss": 0.7796, "step": 91 },
    { "epoch": 0.19, "learning_rate": 1.9941210923902853e-05, "loss": 0.758, "step": 92 },
    { "epoch": 0.19, "learning_rate": 1.993873839311593e-05, "loss": 0.8005, "step": 93 },
    { "epoch": 0.2, "learning_rate": 1.9936215093023884e-05, "loss": 0.7909, "step": 94 },
    { "epoch": 0.2, "learning_rate": 1.99336410365163e-05, "loss": 0.7983, "step": 95 },
    { "epoch": 0.2, "learning_rate": 1.9931016236742026e-05, "loss": 0.7788, "step": 96 },
    { "epoch": 0.2, "learning_rate": 1.992834070710914e-05, "loss": 0.7814, "step": 97 },
    { "epoch": 0.21, "learning_rate": 1.992561446128484e-05, "loss": 0.8033, "step": 98 },
    { "epoch": 0.21, "learning_rate": 1.9922837513195406e-05, "loss": 0.799, "step": 99 },
    { "epoch": 0.21, "learning_rate": 1.9920009877026106e-05, "loss": 0.7851, "step": 100 },
    { "epoch": 0.21, "learning_rate": 1.9917131567221147e-05, "loss": 0.777, "step": 101 },
    { "epoch": 0.21, "learning_rate": 1.9914202598483576e-05, "loss": 0.7727, "step": 102 },
    { "epoch": 0.22, "learning_rate": 1.9911222985775226e-05, "loss": 0.7576, "step": 103 },
    { "epoch": 0.22, "learning_rate": 1.990819274431662e-05, "loss": 0.7592, "step": 104 },
    { "epoch": 0.22, "learning_rate": 1.990511188958692e-05, "loss": 0.7804, "step": 105 },
    { "epoch": 0.22, "learning_rate": 1.9901980437323818e-05, "loss": 0.782, "step": 106 },
    { "epoch": 0.22, "learning_rate": 1.9898798403523475e-05, "loss": 0.7692, "step": 107 },
    { "epoch": 0.23, "learning_rate": 1.9895565804440435e-05, "loss": 0.7847, "step": 108 },
    { "epoch": 0.23, "learning_rate": 1.989228265658754e-05, "loss": 0.7864, "step": 109 },
    { "epoch": 0.23, "learning_rate": 1.9888948976735843e-05, "loss": 0.7833, "step": 110 },
    { "epoch": 0.23, "learning_rate": 1.9885564781914528e-05, "loss": 0.7682, "step": 111 },
    { "epoch": 0.23, "learning_rate": 1.9882130089410822e-05, "loss": 0.7837, "step": 112 },
    { "epoch": 0.24, "learning_rate": 1.987864491676991e-05, "loss": 0.7801, "step": 113 },
    { "epoch": 0.24, "learning_rate": 1.9875109281794828e-05, "loss": 0.7696, "step": 114 },
    { "epoch": 0.24, "learning_rate": 1.9871523202546395e-05, "loss": 0.7797, "step": 115 },
    { "epoch": 0.24, "learning_rate": 1.986788669734311e-05, "loss": 0.7633, "step": 116 },
    { "epoch": 0.24, "learning_rate": 1.986419978476106e-05, "loss": 0.7837, "step": 117 },
    { "epoch": 0.25, "learning_rate": 1.986046248363381e-05, "loss": 0.7767, "step": 118 },
    { "epoch": 0.25, "learning_rate": 1.9856674813052345e-05, "loss": 0.7752, "step": 119 },
    { "epoch": 0.25, "learning_rate": 1.985283679236493e-05, "loss": 0.7957, "step": 120 },
    { "epoch": 0.25, "learning_rate": 1.984894844117704e-05, "loss": 0.7789, "step": 121 },
    { "epoch": 0.26, "learning_rate": 1.9845009779351235e-05, "loss": 0.7819, "step": 122 },
    { "epoch": 0.26, "learning_rate": 1.9841020827007096e-05, "loss": 0.7862, "step": 123 },
    { "epoch": 0.26, "learning_rate": 1.9836981604521077e-05, "loss": 0.7779, "step": 124 },
    { "epoch": 0.26, "learning_rate": 1.983289213252644e-05, "loss": 0.7669, "step": 125 },
    { "epoch": 0.26, "learning_rate": 1.9828752431913116e-05, "loss": 0.7733, "step": 126 },
    { "epoch": 0.27, "learning_rate": 1.9824562523827634e-05, "loss": 0.7853, "step": 127 },
    { "epoch": 0.27, "learning_rate": 1.9820322429672978e-05, "loss": 0.7676, "step": 128 },
    { "epoch": 0.27, "learning_rate": 1.98160321711085e-05, "loss": 0.7738, "step": 129 },
    { "epoch": 0.27, "learning_rate": 1.9811691770049806e-05, "loss": 0.7859, "step": 130 },
    { "epoch": 0.27, "learning_rate": 1.9807301248668638e-05, "loss": 0.7692, "step": 131 },
    { "epoch": 0.28, "learning_rate": 1.9802860629392765e-05, "loss": 0.7514, "step": 132 },
    { "epoch": 0.28, "learning_rate": 1.979836993490586e-05, "loss": 0.772, "step": 133 },
    { "epoch": 0.28, "learning_rate": 1.9793829188147406e-05, "loss": 0.7764, "step": 134 },
    { "epoch": 0.28, "learning_rate": 1.9789238412312553e-05, "loss": 0.7641, "step": 135 },
    { "epoch": 0.28, "learning_rate": 1.9784597630852008e-05, "loss": 0.7735, "step": 136 },
    { "epoch": 0.29, "learning_rate": 1.9779906867471937e-05, "loss": 0.7768, "step": 137 },
    { "epoch": 0.29, "learning_rate": 1.97751661461338e-05, "loss": 0.7526, "step": 138 },
    { "epoch": 0.29, "learning_rate": 1.9770375491054264e-05, "loss": 0.7556, "step": 139 },
    { "epoch": 0.29, "learning_rate": 1.9765534926705082e-05, "loss": 0.775, "step": 140 },
    { "epoch": 0.29, "learning_rate": 1.9760644477812928e-05, "loss": 0.7773, "step": 141 },
    { "epoch": 0.3, "learning_rate": 1.975570416935932e-05, "loss": 0.7845, "step": 142 },
    { "epoch": 0.3, "learning_rate": 1.9750714026580465e-05, "loss": 0.7721, "step": 143 },
    { "epoch": 0.3, "learning_rate": 1.974567407496712e-05, "loss": 0.7715, "step": 144 },
    { "epoch": 0.3, "learning_rate": 1.97405843402645e-05, "loss": 0.7581, "step": 145 },
    { "epoch": 0.31, "learning_rate": 1.9735444848472108e-05, "loss": 0.7639, "step": 146 },
    { "epoch": 0.31, "learning_rate": 1.973025562584362e-05, "loss": 0.7635, "step": 147 },
    { "epoch": 0.31, "learning_rate": 1.9725016698886748e-05, "loss": 0.7684, "step": 148 },
    { "epoch": 0.31, "learning_rate": 1.9719728094363103e-05, "loss": 0.7984, "step": 149 },
    { "epoch": 0.31, "learning_rate": 1.9714389839288073e-05, "loss": 0.7468, "step": 150 },
    { "epoch": 0.32, "learning_rate": 1.9709001960930655e-05, "loss": 0.7517, "step": 151 },
    { "epoch": 0.32, "learning_rate": 1.9703564486813342e-05, "loss": 0.763, "step": 152 },
    { "epoch": 0.32, "learning_rate": 1.9698077444711973e-05, "loss": 0.7766, "step": 153 },
    { "epoch": 0.32, "learning_rate": 1.9692540862655587e-05, "loss": 0.7725, "step": 154 },
    { "epoch": 0.32, "learning_rate": 1.9686954768926288e-05, "loss": 0.7682, "step": 155 },
    { "epoch": 0.33, "learning_rate": 1.96813191920591e-05, "loss": 0.7624, "step": 156 },
    { "epoch": 0.33, "learning_rate": 1.9675634160841807e-05, "loss": 0.7641, "step": 157 },
    { "epoch": 0.33, "learning_rate": 1.9669899704314828e-05, "loss": 0.7651, "step": 158 },
    { "epoch": 0.33, "learning_rate": 1.966411585177105e-05, "loss": 0.7607, "step": 159 },
    { "epoch": 0.33, "learning_rate": 1.9658282632755694e-05, "loss": 0.7646, "step": 160 },
    { "epoch": 0.34, "learning_rate": 1.9652400077066146e-05, "loss": 0.749, "step": 161 },
    { "epoch": 0.34, "learning_rate": 1.964646821475183e-05, "loss": 0.7618, "step": 162 },
    { "epoch": 0.34, "learning_rate": 1.9640487076114012e-05, "loss": 0.7685, "step": 163 },
    { "epoch": 0.34, "learning_rate": 1.9634456691705705e-05, "loss": 0.7697, "step": 164 },
    { "epoch": 0.35, "learning_rate": 1.962837709233146e-05, "loss": 0.7626, "step": 165 },
    { "epoch": 0.35, "learning_rate": 1.9622248309047233e-05, "loss": 0.7484, "step": 166 },
    { "epoch": 0.35, "learning_rate": 1.9616070373160224e-05, "loss": 0.7707, "step": 167 },
    { "epoch": 0.35, "learning_rate": 1.960984331622872e-05, "loss": 0.7751, "step": 168 },
    { "epoch": 0.35, "learning_rate": 1.9603567170061918e-05, "loss": 0.7353, "step": 169 },
    { "epoch": 0.36, "learning_rate": 1.959724196671978e-05, "loss": 0.7564, "step": 170 },
    { "epoch": 0.36, "learning_rate": 1.9590867738512874e-05, "loss": 0.7565, "step": 171 },
    { "epoch": 0.36, "learning_rate": 1.9584444518002178e-05, "loss": 0.7539, "step": 172 },
    { "epoch": 0.36, "learning_rate": 1.9577972337998944e-05, "loss": 0.7561, "step": 173 },
    { "epoch": 0.36, "learning_rate": 1.9571451231564523e-05, "loss": 0.7621, "step": 174 },
    { "epoch": 0.37, "learning_rate": 1.956488123201019e-05, "loss": 0.7693, "step": 175 },
    { "epoch": 0.37, "learning_rate": 1.955826237289697e-05, "loss": 0.7556, "step": 176 },
    { "epoch": 0.37, "learning_rate": 1.9551594688035486e-05, "loss": 0.7524, "step": 177 },
    { "epoch": 0.37, "learning_rate": 1.9544878211485763e-05, "loss": 0.7684, "step": 178 },
    { "epoch": 0.37, "learning_rate": 1.9538112977557077e-05, "loss": 0.7735, "step": 179 },
    { "epoch": 0.38, "learning_rate": 1.9531299020807752e-05, "loss": 0.7673, "step": 180 },
    { "epoch": 0.38, "learning_rate": 1.9524436376045014e-05, "loss": 0.758, "step": 181 },
    { "epoch": 0.38, "learning_rate": 1.9517525078324787e-05, "loss": 0.74, "step": 182 },
    { "epoch": 0.38, "learning_rate": 1.9510565162951538e-05, "loss": 0.7565, "step": 183 },
    { "epoch": 0.38, "learning_rate": 1.9503556665478066e-05, "loss": 0.762, "step": 184 },
    { "epoch": 0.39, "learning_rate": 1.9496499621705357e-05, "loss": 0.7556, "step": 185 },
    { "epoch": 0.39, "learning_rate": 1.9489394067682365e-05, "loss": 0.7562, "step": 186 },
    { "epoch": 0.39, "learning_rate": 1.948224003970586e-05, "loss": 0.7507, "step": 187 },
    { "epoch": 0.39, "learning_rate": 1.9475037574320217e-05, "loss": 0.7326, "step": 188 },
    { "epoch": 0.4, "learning_rate": 1.9467786708317257e-05, "loss": 0.757, "step": 189 },
    { "epoch": 0.4, "learning_rate": 1.946048747873601e-05, "loss": 0.7552, "step": 190 },
    { "epoch": 0.4, "learning_rate": 1.9453139922862595e-05, "loss": 0.7617, "step": 191 },
    { "epoch": 0.4, "learning_rate": 1.9445744078229967e-05, "loss": 0.76, "step": 192 },
    { "epoch": 0.4, "learning_rate": 1.9438299982617766e-05, "loss": 0.7313, "step": 193 },
    { "epoch": 0.41, "learning_rate": 1.9430807674052092e-05, "loss": 0.7563, "step": 194 },
    { "epoch": 0.41, "learning_rate": 1.9423267190805346e-05, "loss": 0.758, "step": 195 },
    { "epoch": 0.41, "learning_rate": 1.9415678571396006e-05, "loss": 0.7436, "step": 196 },
    { "epoch": 0.41, "learning_rate": 1.940804185458844e-05, "loss": 0.7602, "step": 197 },
    { "epoch": 0.41, "learning_rate": 1.9400357079392714e-05, "loss": 0.75, "step": 198 },
    { "epoch": 0.42, "learning_rate": 1.939262428506438e-05, "loss": 0.7541, "step": 199 },
    { "epoch": 0.42, "learning_rate": 1.9384843511104294e-05, "loss": 0.7484, "step": 200 },
    { "epoch": 0.42, "learning_rate": 1.9377014797258384e-05, "loss": 0.7711, "step": 201 },
    { "epoch": 0.42, "learning_rate": 1.936913818351748e-05, "loss": 0.7809, "step": 202 },
    { "epoch": 0.42, "learning_rate": 1.9361213710117094e-05, "loss": 0.7455, "step": 203 },
    { "epoch": 0.43, "learning_rate": 1.9353241417537216e-05, "loss": 0.7428, "step": 204 },
    { "epoch": 0.43, "learning_rate": 1.9345221346502093e-05, "loss": 0.7569, "step": 205 },
    { "epoch": 0.43, "learning_rate": 1.933715353798006e-05, "loss": 0.7488, "step": 206 },
    { "epoch": 0.43, "learning_rate": 1.932903803318329e-05, "loss": 0.7662, "step": 207 },
    { "epoch": 0.44, "learning_rate": 1.9320874873567598e-05, "loss": 0.7498, "step": 208 },
    { "epoch": 0.44, "learning_rate": 1.9312664100832236e-05, "loss": 0.7649, "step": 209 },
    { "epoch": 0.44, "learning_rate": 1.930440575691967e-05, "loss": 0.761, "step": 210 },
    { "epoch": 0.44, "learning_rate": 1.9296099884015382e-05, "loss": 0.7688, "step": 211 },
    { "epoch": 0.44, "learning_rate": 1.9287746524547627e-05, "loss": 0.7587, "step": 212 },
    { "epoch": 0.45, "learning_rate": 1.9279345721187236e-05, "loss": 0.7583, "step": 213 },
    { "epoch": 0.45, "learning_rate": 1.9270897516847406e-05, "loss": 0.7487, "step": 214 },
    { "epoch": 0.45, "learning_rate": 1.9262401954683452e-05, "loss": 0.7666, "step": 215 },
    { "epoch": 0.45, "learning_rate": 1.9253859078092616e-05, "loss": 0.751, "step": 216 },
    { "epoch": 0.45, "learning_rate": 1.9245268930713825e-05, "loss": 0.7543, "step": 217 },
    { "epoch": 0.46, "learning_rate": 1.923663155642748e-05, "loss": 0.7403, "step": 218 },
    { "epoch": 0.46, "learning_rate": 1.9227946999355226e-05, "loss": 0.7597, "step": 219 },
    { "epoch": 0.46, "learning_rate": 1.9219215303859732e-05, "loss": 0.7498, "step": 220 },
    { "epoch": 0.46, "learning_rate": 1.921043651454445e-05, "loss": 0.7569, "step": 221 },
    { "epoch": 0.46, "learning_rate": 1.9201610676253412e-05, "loss": 0.7439, "step": 222 },
    { "epoch": 0.47, "learning_rate": 1.919273783407098e-05, "loss": 0.7614, "step": 223 },
    { "epoch": 0.47, "learning_rate": 1.9183818033321612e-05, "loss": 0.7499, "step": 224 },
    { "epoch": 0.47, "learning_rate": 1.9174851319569662e-05, "loss": 0.7508, "step": 225 },
    { "epoch": 0.47, "learning_rate": 1.916583773861911e-05, "loss": 0.7599, "step": 226 },
    { "epoch": 0.47, "learning_rate": 1.9156777336513352e-05, "loss": 0.7558, "step": 227 },
    { "epoch": 0.48, "learning_rate": 1.9147670159534953e-05, "loss": 0.7509, "step": 228 },
    { "epoch": 0.48, "learning_rate": 1.9138516254205416e-05, "loss": 0.7391, "step": 229 },
    { "epoch": 0.48, "learning_rate": 1.912931566728494e-05, "loss": 0.7374, "step": 230 },
    { "epoch": 0.48, "learning_rate": 1.9120068445772192e-05, "loss": 0.7526, "step": 231 },
    { "epoch": 0.49, "learning_rate": 1.9110774636904052e-05, "loss": 0.7583, "step": 232 },
    { "epoch": 0.49, "learning_rate": 1.910143428815538e-05, "loss": 0.7504, "step": 233 },
    { "epoch": 0.49, "learning_rate": 1.9092047447238775e-05, "loss": 0.7442, "step": 234 },
    { "epoch": 0.49, "learning_rate": 1.9082614162104316e-05, "loss": 0.7393, "step": 235 },
    { "epoch": 0.49, "learning_rate": 1.9073134480939353e-05, "loss": 0.7401, "step": 236 },
    { "epoch": 0.5, "learning_rate": 1.9063608452168214e-05, "loss": 0.758, "step": 237 },
    { "epoch": 0.5, "learning_rate": 1.9054036124452e-05, "loss": 0.7385, "step": 238 },
    { "epoch": 0.5, "learning_rate": 1.9044417546688295e-05, "loss": 0.7308, "step": 239 },
    { "epoch": 0.5, "learning_rate": 1.9034752768010965e-05, "loss": 0.7462, "step": 240 },
    { "epoch": 0.5, "learning_rate": 1.902504183778986e-05, "loss": 0.7543, "step": 241 },
    { "epoch": 0.51, "learning_rate": 1.901528480563059e-05, "loss": 0.7655, "step": 242 },
    { "epoch": 0.51, "learning_rate": 1.9005481721374272e-05, "loss": 0.7359, "step": 243 },
    { "epoch": 0.51, "learning_rate": 1.899563263509725e-05, "loss": 0.7528, "step": 244 },
    { "epoch": 0.51, "learning_rate": 1.898573759711087e-05, "loss": 0.747, "step": 245 },
    { "epoch": 0.51, "learning_rate": 1.89757966579612e-05, "loss": 0.7362, "step": 246 },
    { "epoch": 0.52, "learning_rate": 1.8965809868428798e-05, "loss": 0.7452, "step": 247 },
    { "epoch": 0.52, "learning_rate": 1.8955777279528414e-05, "loss": 0.7614, "step": 248 },
    { "epoch": 0.52, "learning_rate": 1.894569894250877e-05, "loss": 0.7393, "step": 249 },
    { "epoch": 0.52, "learning_rate": 1.8935574908852272e-05, "loss": 0.7588, "step": 250 },
    { "epoch": 0.53, "learning_rate": 1.892540523027475e-05, "loss": 0.7505, "step": 251 },
    { "epoch": 0.53, "learning_rate": 1.8915189958725207e-05, "loss": 0.762, "step": 252 },
    { "epoch": 0.53, "learning_rate": 1.890492914638554e-05, "loss": 0.7408, "step": 253 },
    { "epoch": 0.53, "learning_rate": 1.8894622845670282e-05, "loss": 0.7448, "step": 254 },
    { "epoch": 0.53, "learning_rate": 1.8884271109226327e-05, "loss": 0.7557, "step": 255 },
    { "epoch": 0.54, "learning_rate": 1.8873873989932666e-05, "loss": 0.7322, "step": 256 },
    { "epoch": 0.54, "learning_rate": 1.886343154090012e-05, "loss": 0.7492, "step": 257 },
    { "epoch": 0.54, "learning_rate": 1.8852943815471058e-05, "loss": 0.7496, "step": 258 },
    { "epoch": 0.54, "learning_rate": 1.8842410867219137e-05, "loss": 0.7666, "step": 259 },
    { "epoch": 0.54, "learning_rate": 1.8831832749949015e-05, "loss": 0.7269, "step": 260 },
    { "epoch": 0.55, "learning_rate": 1.882120951769609e-05, "loss": 0.7479, "step": 261 },
    { "epoch": 0.55, "learning_rate": 1.8810541224726217e-05, "loss": 0.7458, "step": 262 },
    { "epoch": 0.55, "learning_rate": 1.879982792553543e-05, "loss": 0.7561, "step": 263 },
    { "epoch": 0.55, "learning_rate": 1.878906967484966e-05, "loss": 0.7624, "step": 264 },
    { "epoch": 0.55, "learning_rate": 1.8778266527624466e-05, "loss": 0.7443, "step": 265 },
    { "epoch": 0.56, "learning_rate": 1.8767418539044753e-05, "loss": 0.7249, "step": 266 },
    { "epoch": 0.56, "learning_rate": 1.8756525764524475e-05, "loss": 0.7394, "step": 267 },
    { "epoch": 0.56, "learning_rate": 1.8745588259706366e-05, "loss": 0.741, "step": 268 },
    { "epoch": 0.56, "learning_rate": 1.8734606080461657e-05, "loss": 0.7288, "step": 269 },
    { "epoch": 0.56, "learning_rate": 1.8723579282889784e-05, "loss": 0.7158, "step": 270 },
    { "epoch": 0.57, "learning_rate": 1.87125079233181e-05, "loss": 0.7335, "step": 271 },
    { "epoch": 0.57, "learning_rate": 1.8701392058301595e-05, "loss": 0.7453, "step": 272 },
    { "epoch": 0.57, "learning_rate": 1.86902317446226e-05, "loss": 0.7369, "step": 273 },
    { "epoch": 0.57, "learning_rate": 1.86790270392905e-05, "loss": 0.7603, "step": 274 },
    { "epoch": 0.58, "learning_rate": 1.8667777999541444e-05, "loss": 0.751, "step": 275 },
    { "epoch": 0.58, "learning_rate": 1.865648468283805e-05, "loss": 0.7513, "step": 276 },
    { "epoch": 0.58, "learning_rate": 1.8645147146869114e-05, "loss": 0.7456, "step": 277 },
    { "epoch": 0.58, "learning_rate": 1.863376544954931e-05, "loss": 0.726, "step": 278 },
    { "epoch": 0.58, "learning_rate": 1.8622339649018907e-05, "loss": 0.7292, "step": 279 },
    { "epoch": 0.59, "learning_rate": 1.8610869803643454e-05, "loss": 0.7446, "step": 280 },
    { "epoch": 0.59, "learning_rate": 1.8599355972013493e-05, "loss": 0.7499, "step": 281 },
    { "epoch": 0.59, "learning_rate": 1.8587798212944255e-05, "loss": 0.7538, "step": 282 },
    { "epoch": 0.59, "learning_rate": 1.8576196585475376e-05, "loss": 0.7275, "step": 283 },
    { "epoch": 0.59, "learning_rate": 1.856455114887056e-05, "loss": 0.7494, "step": 284 },
    { "epoch": 0.6, "learning_rate": 1.8552861962617318e-05, "loss": 0.7284, "step": 285 },
    { "epoch": 0.6, "learning_rate": 1.854112908642663e-05, "loss": 0.7521, "step": 286 },
    { "epoch": 0.6, "learning_rate": 1.8529352580232668e-05, "loss": 0.7415, "step": 287 },
    { "epoch": 0.6, "learning_rate": 1.8517532504192456e-05, "loss": 0.7358, "step": 288 },
    { "epoch": 0.6, "learning_rate": 1.8505668918685603e-05, "loss": 0.7431, "step": 289 },
    { "epoch": 0.61, "learning_rate": 1.849376188431396e-05, "loss": 0.7424, "step": 290 },
    { "epoch": 0.61, "learning_rate": 1.8481811461901323e-05, "loss": 0.7521, "step": 291 },
    { "epoch": 0.61, "learning_rate": 1.8469817712493148e-05, "loss": 0.7374, "step": 292 },
    { "epoch": 0.61, "learning_rate": 1.845778069735618e-05, "loss": 0.7406, "step": 293 },
    { "epoch": 0.62, "learning_rate": 1.8445700477978207e-05, "loss": 0.7219, "step": 294 },
    { "epoch": 0.62, "learning_rate": 1.8433577116067684e-05, "loss": 0.7136, "step": 295 },
    { "epoch": 0.62, "learning_rate": 1.8421410673553475e-05, "loss": 0.7286, "step": 296 },
    { "epoch": 0.62, "learning_rate": 1.8409201212584493e-05, "loss": 0.7359, "step": 297 },
    { "epoch": 0.62, "learning_rate": 1.8396948795529405e-05, "loss": 0.7451, "step": 298 },
    { "epoch": 0.63, "learning_rate": 1.8384653484976305e-05, "loss": 0.7226, "step": 299 },
    { "epoch": 0.63, "learning_rate": 1.8372315343732395e-05, "loss": 0.7466, "step": 300 },
    { "epoch": 0.63, "learning_rate": 1.8359934434823672e-05, "loss": 0.7195, "step": 301 },
    { "epoch": 0.63, "learning_rate": 1.8347510821494593e-05, "loss": 0.7436, "step": 302 },
    { "epoch": 0.63, "learning_rate": 1.8335044567207763e-05, "loss": 0.7357, "step": 303 },
    { "epoch": 0.64, "learning_rate": 1.8322535735643604e-05, "loss": 0.7198, "step": 304 },
    { "epoch": 0.64, "learning_rate": 1.8309984390700038e-05, "loss": 0.7406, "step": 305 },
    { "epoch": 0.64, "learning_rate": 1.8297390596492143e-05, "loss": 0.7428, "step": 306 },
    { "epoch": 0.64, "learning_rate": 1.828475441735185e-05, "loss": 0.7164, "step": 307 },
    { "epoch": 0.64, "learning_rate": 1.8272075917827597e-05, "loss": 0.7352, "step": 308 },
    { "epoch": 0.65, "learning_rate": 1.8259355162684e-05, "loss": 0.7577, "step": 309 },
    { "epoch": 0.65, "learning_rate": 1.824659221690153e-05, "loss": 0.7299, "step": 310 },
    { "epoch": 0.65, "learning_rate": 1.823378714567618e-05, "loss": 0.7379, "step": 311 },
    { "epoch": 0.65, "learning_rate": 1.822094001441913e-05, "loss": 0.737, "step": 312 },
    { "epoch": 0.65, "learning_rate": 1.82080508887564e-05, "loss": 0.7348, "step": 313 },
    { "epoch": 0.66, "learning_rate": 1.8195119834528535e-05, "loss": 0.7371, "step": 314 },
    { "epoch": 0.66, "learning_rate": 1.8182146917790264e-05, "loss": 0.7327, "step": 315 },
    { "epoch": 0.66, "learning_rate": 1.8169132204810157e-05, "loss": 0.7157, "step": 316 },
    { "epoch": 0.66, "learning_rate": 1.815607576207029e-05, "loss": 0.7197, "step": 317 },
    { "epoch": 0.67, "learning_rate": 1.814297765626589e-05, "loss": 0.7318, "step": 318 },
    { "epoch": 0.67, "learning_rate": 1.8129837954305033e-05, "loss": 0.74, "step": 319 },
    { "epoch": 0.67, "learning_rate": 1.8116656723308253e-05, "loss": 0.7085, "step": 320 },
    { "epoch": 0.67, "learning_rate": 1.810343403060824e-05, "loss": 0.7156, "step": 321 },
    { "epoch": 0.67, "learning_rate": 1.8090169943749477e-05, "loss": 0.7359, "step": 322 },
    { "epoch": 0.68, "learning_rate": 1.8076864530487886e-05, "loss": 0.7539, "step": 323 },
    { "epoch": 0.68, "learning_rate": 1.8063517858790517e-05, "loss": 0.7254, "step": 324 },
    { "epoch": 0.68, "learning_rate": 1.8050129996835147e-05, "loss": 0.734, "step": 325 },
    { "epoch": 0.68, "learning_rate": 1.8036701013009988e-05, "loss": 0.722, "step": 326 },
    { "epoch": 0.68, "learning_rate": 1.80232309759133e-05, "loss": 0.7254, "step": 327 },
    { "epoch": 0.69, "learning_rate": 1.800971995435305e-05, "loss": 0.7466, "step": 328 },
    { "epoch": 0.69, "learning_rate": 1.799616801734657e-05, "loss": 0.7425, "step": 329 },
    { "epoch": 0.69, "learning_rate": 1.7982575234120196e-05, "loss": 0.7188, "step": 330 },
    { "epoch": 0.69, "learning_rate": 1.796894167410891e-05, "loss": 0.7361, "step": 331 },
    { "epoch": 0.69, "learning_rate": 1.7955267406955997e-05, "loss": 0.7336, "step": 332 },
    { "epoch": 0.7, "learning_rate": 1.7941552502512684e-05, "loss": 0.742, "step": 333 },
    { "epoch": 0.7, "learning_rate": 1.792779703083777e-05, "loss": 0.7266, "step": 334 },
    { "epoch": 0.7, "learning_rate": 1.7914001062197298e-05, "loss": 0.7148, "step": 335 },
    { "epoch": 0.7, "learning_rate": 1.790016466706417e-05, "loss": 0.7265, "step": 336 },
    { "epoch": 0.71, "learning_rate": 1.788628791611779e-05, "loss": 0.7258, "step": 337 },
    { "epoch": 0.71, "learning_rate": 1.787237088024372e-05, "loss": 0.7338, "step": 338 },
    { "epoch": 0.71, "learning_rate": 1.7858413630533305e-05, "loss": 0.7393, "step": 339 },
    { "epoch": 0.71, "learning_rate": 1.78444162382833e-05, "loss": 0.7285, "step": 340 },
    { "epoch": 0.71, "learning_rate": 1.7830378774995524e-05, "loss": 0.7253, "step": 341 },
    { "epoch": 0.72, "learning_rate": 1.781630131237649e-05, "loss": 0.7241, "step": 342 },
    { "epoch": 0.72, "learning_rate": 1.780218392233704e-05, "loss": 0.734, "step": 343 },
    { "epoch": 0.72, "learning_rate": 1.778802667699196e-05, "loss": 0.7445, "step": 344 },
    { "epoch": 0.72, "learning_rate": 1.7773829648659645e-05, "loss": 0.7224, "step": 345 },
    { "epoch": 0.72, "learning_rate": 1.7759592909861694e-05, "loss": 0.7142, "step": 346 },
    { "epoch": 0.73, "learning_rate": 1.774531653332256e-05, "loss": 0.7023, "step": 347 },
    { "epoch": 0.73, "learning_rate": 1.7731000591969182e-05, "loss": 0.7368, "step": 348 },
    { "epoch": 0.73, "learning_rate": 1.77166451589306e-05, "loss": 0.7322, "step": 349 },
    { "epoch": 0.73, "learning_rate": 1.7702250307537583e-05, "loss": 0.7385, "step": 350 },
    { "epoch": 0.73, "learning_rate": 1.768781611132226e-05, "loss": 0.7326, "step": 351 },
    { "epoch": 0.74, "learning_rate": 1.7673342644017744e-05, "loss": 0.7349, "step": 352 },
    { "epoch": 0.74, "learning_rate": 1.7658829979557757e-05, "loss": 0.7428, "step": 353 },
    { "epoch": 0.74, "learning_rate": 1.764427819207624e-05, "loss": 0.7335, "step": 354 },
    { "epoch": 0.74, "learning_rate": 1.7629687355906988e-05, "loss": 0.7232, "step": 355 },
    { "epoch": 0.74, "learning_rate": 1.761505754558327e-05, "loss": 0.7228, "step": 356 },
    { "epoch": 0.75, "learning_rate": 1.7600388835837427e-05, "loss": 0.7106, "step": 357 },
    { "epoch": 0.75, "learning_rate": 1.758568130160053e-05, "loss": 0.722, "step": 358 },
    { "epoch": 0.75, "learning_rate": 1.757093501800196e-05, "loss": 0.7311, "step": 359 },
    { "epoch": 0.75, "learning_rate": 1.755615006036904e-05, "loss": 0.7222, "step": 360 },
    { "epoch": 0.76, "learning_rate": 1.7541326504226654e-05, "loss": 0.7318, "step": 361 },
    { "epoch": 0.76, "learning_rate": 1.7526464425296846e-05, "loss": 0.7172, "step": 362 },
    { "epoch": 0.76, "learning_rate": 1.7511563899498455e-05, "loss": 0.7423, "step": 363 },
    { "epoch": 0.76, "learning_rate": 1.7496625002946702e-05, "loss": 0.7244, "step": 364 },
    { "epoch": 0.76, "learning_rate": 1.7481647811952828e-05, "loss": 0.7117, "step": 365 },
    { "epoch": 0.77, "learning_rate": 1.746663240302368e-05, "loss": 0.7253, "step": 366 },
    { "epoch": 0.77, "learning_rate": 1.7451578852861337e-05, "loss": 0.7325, "step": 367 },
    { "epoch": 0.77, "learning_rate": 1.743648723836271e-05, "loss": 0.7177, "step": 368 },
    { "epoch": 0.77, "learning_rate": 1.7421357636619153e-05, "loss": 0.7215, "step": 369 },
    { "epoch": 0.77, "learning_rate": 1.7406190124916064e-05, "loss": 0.7149, "step": 370 },
    { "epoch": 0.78, "learning_rate": 1.7390984780732493e-05, "loss": 0.7259, "step": 371 },
    { "epoch": 0.78, "learning_rate": 1.737574168174075e-05, "loss": 0.7317, "step": 372 },
    { "epoch": 0.78, "learning_rate": 1.7360460905806004e-05, "loss": 0.7217, "step": 373 },
    { "epoch": 0.78, "learning_rate": 1.734514253098589e-05, "loss": 0.7324, "step": 374 },
    { "epoch": 0.78, "learning_rate": 1.7329786635530094e-05, "loss": 0.7243, "step": 375 },
    { "epoch": 0.79, "learning_rate": 1.7314393297879982e-05, "loss": 0.7297, "step": 376 },
    { "epoch": 0.79, "learning_rate": 1.7298962596668176e-05, "loss": 0.7373, "step": 377 },
    { "epoch": 0.79, "learning_rate": 1.7283494610718153e-05, "loss": 0.7352, "step": 378 },
    { "epoch": 0.79, "learning_rate": 1.726798941904386e-05, "loss": 0.7397, "step": 379 },
    { "epoch": 0.79, "learning_rate": 1.7252447100849294e-05, "loss": 0.7179, "step": 380 },
    { "epoch": 0.8, "learning_rate": 1.72368677355281e-05, "loss": 0.73, "step": 381 },
    { "epoch": 0.8, "learning_rate": 1.7221251402663176e-05, "loss": 0.7321, "step": 382 },
    { "epoch": 0.8, "learning_rate": 1.7205598182026243e-05, "loss": 0.7197, "step": 383 },
    { "epoch": 0.8, "learning_rate": 1.7189908153577473e-05, "loss": 0.7276, "step": 384 },
    { "epoch": 0.81, "learning_rate": 1.7174181397465047e-05, "loss": 0.7238, "step": 385 },
    { "epoch": 0.81, "learning_rate": 1.7158417994024766e-05, "loss": 0.7024, "step": 386 },
    { "epoch": 0.81, "learning_rate": 1.7142618023779623e-05, "loss": 0.73, "step": 387 },
    { "epoch": 0.81, "learning_rate": 1.7126781567439418e-05, "loss": 0.7314, "step": 388 },
    { "epoch": 0.81, "learning_rate": 1.7110908705900322e-05, "loss": 0.7162, "step": 389 },
    { "epoch": 0.82, "learning_rate": 1.709499952024447e-05, "loss": 0.7249, "step": 390 },
    { "epoch": 0.82, "learning_rate": 1.707905409173955e-05, "loss": 0.7266, "step": 391 },
    { "epoch": 0.82, "learning_rate": 1.7063072501838388e-05, "loss": 0.7372, "step": 392 },
    { "epoch": 0.82, "learning_rate": 1.7047054832178533e-05, "loss": 0.7222, "step": 393 },
    { "epoch": 0.82, "learning_rate": 1.7031001164581828e-05, "loss": 0.7293, "step": 394 },
    { "epoch": 0.83, "learning_rate": 1.7014911581054016e-05, "loss": 0.7122, "step": 395 },
    { "epoch": 0.83, "learning_rate": 1.6998786163784295e-05, "loss": 0.7096, "step": 396 },
    { "epoch": 0.83, "learning_rate": 1.6982624995144913e-05, "loss": 0.7276, "step": 397 },
    { "epoch": 0.83, "learning_rate": 1.696642815769075e-05, "loss": 0.7405, "step": 398 },
    { "epoch": 0.83, "learning_rate": 1.6950195734158874e-05, "loss": 0.7231, "step": 399 },
    { "epoch": 0.84, "learning_rate": 1.6933927807468155e-05, "loss": 0.7114, "step": 400 },
    { "epoch": 0.84, "learning_rate": 1.6917624460718812e-05, "loss": 0.7328, "step": 401 },
    { "epoch": 0.84, "learning_rate": 1.690128577719199e-05, "loss": 0.7272, "step": 402 },
    { "epoch": 0.84, "learning_rate": 1.6884911840349354e-05, "loss": 0.7184, "step": 403 },
    { "epoch": 0.85, "learning_rate": 1.6868502733832647e-05, "loss": 0.7283, "step": 404 },
    { "epoch": 0.85, "learning_rate": 1.6852058541463263e-05, "loss": 0.7021, "step": 405 },
    { "epoch": 0.85, "learning_rate": 1.683557934724183e-05, "loss": 0.7125, "step": 406 },
    { "epoch": 0.85, "learning_rate": 1.6819065235347763e-05, "loss": 0.7227, "step": 407 },
    { "epoch": 0.85, "learning_rate": 1.680251629013885e-05, "loss": 0.7224, "step": 408 },
    { "epoch": 0.86, "learning_rate": 1.6785932596150827e-05, "loss": 0.7021, "step": 409 },
    { "epoch": 0.86, "learning_rate": 1.6769314238096906e-05, "loss": 0.7198, "step": 410 },
    { "epoch": 0.86, "learning_rate": 1.67526613008674e-05, "loss": 0.703, "step": 411 },
    { "epoch": 0.86, "learning_rate": 1.673597386952924e-05, "loss": 0.7271, "step": 412 },
    { "epoch": 0.86, "learning_rate": 1.6719252029325575e-05, "loss": 0.7109, "step": 413 },
    { "epoch": 0.87, "learning_rate": 1.670249586567531e-05, "loss": 0.7447, "step": 414 },
    { "epoch": 0.87, "learning_rate": 1.668570546417269e-05, "loss": 0.7149, "step": 415 },
    { "epoch": 0.87, "learning_rate": 1.6668880910586853e-05, "loss": 0.7236, "step": 416 },
    { "epoch": 0.87, "learning_rate": 1.6652022290861393e-05, "loss": 0.7187, "step": 417 },
    { "epoch": 0.87, "learning_rate": 1.663512969111392e-05, "loss": 0.7111, "step": 418 },
    { "epoch": 0.88, "learning_rate": 1.6618203197635624e-05, "loss": 0.7268, "step": 419 },
    { "epoch": 0.88, "learning_rate": 1.6601242896890832e-05, "loss": 0.7048, "step": 420 },
    { "epoch": 0.88, "learning_rate": 1.658424887551656e-05, "loss": 0.7068, "step": 421 },
    { "epoch": 0.88, "learning_rate": 1.6567221220322082e-05, "loss": 0.6816, "step": 422 },
    { "epoch": 0.88, "learning_rate": 1.655016001828848e-05, "loss": 0.727, "step": 423 },
    { "epoch": 0.89, "learning_rate": 1.6533065356568206e-05, "loss": 0.7273, "step": 424 },
    { "epoch": 0.89, "learning_rate": 1.6515937322484627e-05, "loss": 0.739, "step": 425 },
    { "epoch": 0.89, "learning_rate": 1.6498776003531575e-05, "loss": 0.7126, "step": 426 },
    { "epoch": 0.89, "learning_rate": 1.6481581487372925e-05, "loss": 0.7105, "step": 427 },
    { "epoch": 0.9, "learning_rate": 1.6464353861842115e-05, "loss": 0.7249, "step": 428 },
    { "epoch": 0.9, "learning_rate": 1.6447093214941727e-05, "loss": 0.7217, "step": 429 },
    { "epoch": 0.9, "learning_rate": 1.6429799634843012e-05, "loss": 0.7066, "step": 430 },
    { "epoch": 0.9, "learning_rate": 1.6412473209885466e-05, "loss": 0.7101, "step": 431 },
    { "epoch": 0.9, "learning_rate": 1.6395114028576344e-05, "loss": 0.7329, "step": 432 },
    { "epoch": 0.91, "learning_rate": 1.6377722179590237e-05, "loss": 0.7148, "step": 433 },
    { "epoch": 0.91, "learning_rate": 1.636029775176862e-05, "loss": 0.707, "step": 434 },
    { "epoch": 0.91, "learning_rate": 1.6342840834119376e-05, "loss": 0.7226, "step": 435 },
    { "epoch": 0.91, "learning_rate": 1.6325351515816353e-05, "loss": 0.7167, "step": 436 },
    { "epoch": 0.91, "learning_rate": 1.6307829886198913e-05, "loss": 0.7145, "step": 437 },
    { "epoch": 0.92, "learning_rate": 1.629027603477147e-05, "loss": 0.7125, "step": 438 },
    { "epoch": 0.92, "learning_rate": 1.627269005120304e-05, "loss": 0.7064, "step": 439 },
    { "epoch": 0.92, "learning_rate": 1.6255072025326763e-05, "loss": 0.7353, "step": 440 },
    { "epoch": 0.92, "learning_rate": 1.6237422047139472e-05, "loss": 0.7048, "step": 441 },
    { "epoch": 0.92, "learning_rate": 1.621974020680122e-05, "loss": 0.7319, "step": 442 },
    { "epoch": 0.93, "learning_rate": 1.6202026594634804e-05, "loss": 0.7139, "step": 443 },
    { "epoch": 0.93, "learning_rate": 1.618428130112533e-05, "loss": 0.7147, "step": 444 },
    { "epoch": 0.93, "learning_rate": 1.6166504416919747e-05, "loss": 0.7332, "step": 445 },
    { "epoch": 0.93, "learning_rate": 1.6148696032826354e-05, "loss": 0.701, "step": 446 },
    { "epoch": 0.94, "learning_rate": 1.6130856239814382e-05, "loss": 0.7186, "step": 447 },
    { "epoch": 0.94, "learning_rate": 1.611298512901349e-05, "loss": 0.7168, "step": 448 },
    { "epoch": 0.94, "learning_rate": 1.6095082791713322e-05, "loss": 0.7203, "step": 449 },
    { "epoch": 0.94, "learning_rate": 1.6077149319363035e-05, "loss": 0.726, "step": 450 },
    { "epoch": 0.94, "learning_rate": 1.6059184803570826e-05, "loss": 0.7138, "step": 451 },
    { "epoch": 0.95, "learning_rate": 1.6041189336103475e-05, "loss": 0.6974, "step": 452 },
    { "epoch": 0.95, "learning_rate": 1.6023163008885858e-05, "loss": 0.7066, "step": 453 },
    { "epoch": 0.95, "learning_rate": 1.6005105914000508e-05, "loss": 0.7231, "step": 454 },
    { "epoch": 0.95, "learning_rate": 1.598701814368711e-05, "loss": 0.7098, "step": 455 },
    { "epoch": 0.95, "learning_rate": 1.596889979034205e-05, "loss": 0.7099, "step": 456 },
    { "epoch": 0.96, "learning_rate": 1.595075094651795e-05, "loss": 0.7043, "step": 457 },
    { "epoch": 0.96, "learning_rate": 1.5932571704923168e-05, "loss": 0.7157, "step": 458 },
    { "epoch": 0.96, "learning_rate": 1.5914362158421352e-05, "loss": 0.6987, "step": 459 },
    { "epoch": 0.96, "learning_rate": 1.589612240003095e-05, "loss": 0.7252, "step": 460 },
    { "epoch": 0.96, "learning_rate": 1.5877852522924733e-05, "loss": 0.7133, "step": 461 },
    { "epoch": 0.97, "learning_rate": 1.585955262042934e-05, "loss": 0.7132, "step": 462 },
    { "epoch": 0.97, "learning_rate": 1.584122278602477e-05, "loss": 0.7024, "step": 463 },
    { "epoch": 0.97, "learning_rate": 1.5822863113343934e-05, "loss": 0.7047, "step": 464 },
    { "epoch": 0.97, "learning_rate": 1.580447369617216e-05, "loss": 0.7125, "step": 465 },
    { "epoch": 0.97, "learning_rate": 1.5786054628446712e-05, "loss": 0.6909, "step": 466 },
    { "epoch": 0.98, "learning_rate": 1.576760600425632e-05, "loss": 0.7032, "step": 467 },
    { "epoch": 0.98, "learning_rate": 1.57491279178407e-05, "loss": 0.6956, "step": 468 },
    { "epoch": 0.98, "learning_rate": 1.5730620463590052e-05, "loss": 0.7192, "step": 469 },
    { "epoch": 0.98, "learning_rate": 1.5712083736044613e-05, "loss": 0.7012, "step": 470 },
    { "epoch": 0.99, "learning_rate": 1.5693517829894138e-05, "loss": 0.7207, "step": 471 },
    { "epoch": 0.99, "learning_rate": 1.5674922839977446e-05, "loss": 0.7316, "step": 472 },
    { "epoch": 0.99, "learning_rate": 1.5656298861281917e-05, "loss": 0.7252, "step": 473 },
    { "epoch": 0.99, "learning_rate": 1.5637645988943008e-05, "loss": 0.7055, "step": 474 },
    { "epoch": 0.99, "learning_rate": 1.5618964318243775e-05, "loss": 0.712, "step": 475 },
    { "epoch": 1.0, "learning_rate": 1.560025394461439e-05, "loss": 0.7085, "step": 476 },
    { "epoch": 1.0, "learning_rate": 1.558151496363163e-05, "loss": 0.7346, "step": 477 },
    { "epoch": 1.0, "learning_rate": 1.5562747471018415e-05, "loss": 0.7082, "step": 478 },
    { "epoch": 1.0, "learning_rate": 1.554395156264331e-05, "loss": 0.6151, "step": 479 },
    { "epoch": 1.0, "learning_rate": 1.552512733452003e-05, "loss": 0.6356, "step": 480 },
    { "epoch": 1.01, "learning_rate": 1.550627488280695e-05, "loss": 0.6009, "step": 481 },
    { "epoch": 1.01, "learning_rate": 1.5487394303806632e-05, "loss": 0.6051, "step": 482 },
    { "epoch": 1.01, "learning_rate": 1.54684856939653e-05, "loss": 0.6092, "step": 483 },
    { "epoch": 1.01, "learning_rate": 1.544954914987238e-05, "loss": 0.5934, "step": 484 },
    { "epoch": 1.01, "learning_rate": 1.543058476825998e-05, "loss": 0.5967, "step": 485 },
    { "epoch": 1.02, "learning_rate": 1.541159264600242e-05, "loss": 0.6137, "step": 486 },
    { "epoch": 1.02, "learning_rate": 1.5392572880115726e-05, "loss": 0.5997, "step": 487 },
    { "epoch": 1.02, "learning_rate": 1.5373525567757124e-05, "loss": 0.5941, "step": 488 },
    { "epoch": 1.02, "learning_rate": 1.5354450806224553e-05, "loss": 0.6123, "step": 489 },
    { "epoch": 1.03, "learning_rate": 1.5335348692956177e-05, "loss": 0.5996, "step": 490 },
    { "epoch": 1.03, "learning_rate": 1.5316219325529882e-05, "loss": 0.6117, "step": 491 },
    { "epoch": 1.03, "learning_rate": 1.529706280166276e-05, "loss": 0.6068, "step": 492 },
    { "epoch": 1.03, "learning_rate": 1.5277879219210637e-05, "loss": 0.6223, "step": 493 },
    { "epoch": 1.03, "learning_rate": 1.5258668676167548e-05, "loss": 0.6001, "step": 494 },
    { "epoch": 1.04, "learning_rate": 1.5239431270665263e-05, "loss": 0.5992, "step": 495 },
    { "epoch": 1.04, "learning_rate": 1.5220167100972763e-05, "loss": 0.6121, "step": 496 },
    { "epoch": 1.04, "learning_rate": 1.5200876265495745e-05, "loss": 0.6007, "step": 497 },
    { "epoch": 1.04, "learning_rate": 1.518155886277613e-05, "loss": 0.6077, "step": 498 },
    { "epoch": 1.04, "learning_rate": 1.516221499149154e-05, "loss": 0.587, "step": 499 },
    { "epoch": 1.05, "learning_rate": 1.5142844750454807e-05, "loss": 0.5979, "step": 500 },
    { "epoch": 1.05, "learning_rate": 1.512344823861347e-05, "loss": 0.5998, "step": 501 },
    { "epoch": 1.05, "learning_rate": 1.5104025555049262e-05, "loss": 0.6071, "step": 502 },
    { "epoch": 1.05, "learning_rate": 1.5084576798977608e-05, "loss": 0.6094, "step": 503 },
    { "epoch": 1.05, "learning_rate": 1.5065102069747117e-05, "loss": 0.601, "step": 504 },
    { "epoch": 1.06, "learning_rate": 1.504560146683907e-05, "loss": 0.6088, "step": 505 },
    { "epoch": 1.06, "learning_rate": 1.502607508986693e-05, "loss": 0.6074, "step": 506 },
    { "epoch": 1.06, "learning_rate": 1.50065230385758e-05, "loss": 0.5975, "step": 507 },
    { "epoch": 1.06, "learning_rate": 1.498694541284195e-05, "loss": 0.6112, "step": 508 },
    { "epoch": 1.06, "learning_rate": 1.4967342312672283e-05, "loss": 0.5855, "step": 509 },
    { "epoch": 1.07, "learning_rate": 1.4947713838203835e-05, "loss": 0.5944, "step": 510 },
    { "epoch": 1.07, "learning_rate": 1.492806008970325e-05, "loss": 0.6267, "step": 511 },
    { "epoch": 1.07, "learning_rate": 1.4908381167566286e-05, "loss": 0.5901, "step": 512 },
    { "epoch": 1.07, "learning_rate": 1.4888677172317294e-05, "loss": 0.6031, "step": 513 },
    { "epoch": 1.08, "learning_rate": 1.48689482046087e-05, "loss": 0.6035, "step": 514 },
    { "epoch": 1.08, "learning_rate": 1.4849194365220497e-05, "loss": 0.6072, "step": 515 },
    { "epoch": 1.08, "learning_rate": 1.4829415755059726e-05, "loss": 0.606, "step": 516 },
    { "epoch": 1.08, "learning_rate": 1.4809612475159968e-05, "loss": 0.5902, "step": 517 },
    { "epoch": 1.08, "learning_rate": 1.4789784626680819e-05, "loss": 0.6053, "step": 518 },
    { "epoch": 1.09, "learning_rate": 1.4769932310907372e-05, "loss": 0.5967, "step": 519 },
    { "epoch": 1.09, "learning_rate": 1.475005562924971e-05, "loss": 0.6255, "step": 520 },
    { "epoch": 1.09, "learning_rate": 1.4730154683242385e-05, "loss": 0.5976, "step": 521 },
    { "epoch": 1.09, "learning_rate": 1.4710229574543893e-05, "loss": 0.6042, "step": 522 },
    { "epoch": 1.09, "learning_rate": 1.4690280404936155e-05, "loss": 0.6117, "step": 523 },
    { "epoch": 1.1, "learning_rate": 1.467030727632401e-05, "loss": 0.6102, "step": 524 },
    { "epoch": 1.1, "learning_rate": 1.465031029073467e-05, "loss": 0.6071, "step": 525 },
    { "epoch": 1.1, "learning_rate": 1.4630289550317234e-05, "loss": 0.5912, "step": 526 },
    { "epoch": 1.1, "learning_rate": 1.4610245157342127e-05, "loss": 0.6072, "step": 527 },
    { "epoch": 1.1, "learning_rate": 1.4590177214200609e-05, "loss": 0.5998, "step": 528 },
    { "epoch": 1.11, "learning_rate": 1.4570085823404232e-05, "loss": 0.5872, "step": 529 },
    { "epoch": 1.11, "learning_rate": 1.4549971087584329e-05, "loss": 0.5864, "step": 530 },
    { "epoch": 1.11, "learning_rate": 1.4529833109491485e-05, "loss": 0.6046, "step": 531 },
    { "epoch": 1.11, "learning_rate": 1.4509671991995003e-05, "loss": 0.5969, "step": 532 },
    { "epoch": 1.12, "learning_rate": 1.4489487838082396e-05, "loss": 0.5983, "step": 533 },
    { "epoch": 1.12, "learning_rate": 1.4469280750858854e-05, "loss": 0.5962, "step": 534 },
    { "epoch": 1.12, "learning_rate": 1.4449050833546709e-05, "loss": 0.6116, "step": 535 },
    { "epoch": 1.12, "learning_rate": 1.4428798189484914e-05, "loss": 0.6112, "step": 536 },
    { "epoch": 1.12, "learning_rate": 1.4408522922128518e-05, "loss": 0.6068, "step": 537 },
    { "epoch": 1.13,
|
"learning_rate": 1.4388225135048137e-05, |
|
"loss": 0.602, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4367904931929422e-05, |
|
"loss": 0.6067, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.4347562416572525e-05, |
|
"loss": 0.5949, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.432719769289158e-05, |
|
"loss": 0.6184, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 1.430681086491416e-05, |
|
"loss": 0.6086, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4286402036780762e-05, |
|
"loss": 0.6294, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4265971312744252e-05, |
|
"loss": 0.6051, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4245518797169353e-05, |
|
"loss": 0.5972, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4225044594532104e-05, |
|
"loss": 0.589, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 1.4204548809419323e-05, |
|
"loss": 0.5991, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4184031546528077e-05, |
|
"loss": 0.6221, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4163492910665153e-05, |
|
"loss": 0.6146, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4142933006746502e-05, |
|
"loss": 0.6034, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4122351939796733e-05, |
|
"loss": 0.607, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 1.4101749814948544e-05, |
|
"loss": 0.6084, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4081126737442216e-05, |
|
"loss": 0.6025, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4060482812625055e-05, |
|
"loss": 0.6093, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4039818145950856e-05, |
|
"loss": 0.5989, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 1.4019132842979375e-05, |
|
"loss": 0.5976, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.399842700937578e-05, |
|
"loss": 0.6095, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3977700750910112e-05, |
|
"loss": 0.5983, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.395695417345675e-05, |
|
"loss": 0.5928, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3936187382993862e-05, |
|
"loss": 0.5971, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 1.3915400485602876e-05, |
|
"loss": 0.6084, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3894593587467924e-05, |
|
"loss": 0.6206, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3873766794875309e-05, |
|
"loss": 0.6129, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3852920214212966e-05, |
|
"loss": 0.6148, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3832053951969893e-05, |
|
"loss": 0.6037, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 1.3811168114735647e-05, |
|
"loss": 0.5953, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3790262809199768e-05, |
|
"loss": 0.6019, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3769338142151245e-05, |
|
"loss": 0.6151, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3748394220477972e-05, |
|
"loss": 0.5969, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3727431151166196e-05, |
|
"loss": 0.6124, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 1.3706449041299984e-05, |
|
"loss": 0.6257, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3685447998060651e-05, |
|
"loss": 0.6091, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3664428128726242e-05, |
|
"loss": 0.5825, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3643389540670963e-05, |
|
"loss": 0.5992, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 1.3622332341364646e-05, |
|
"loss": 0.5983, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3601256638372182e-05, |
|
"loss": 0.6007, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3580162539352993e-05, |
|
"loss": 0.6022, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3559050152060465e-05, |
|
"loss": 0.5982, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3537919584341413e-05, |
|
"loss": 0.6096, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 1.3516770944135514e-05, |
|
"loss": 0.6141, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3495604339474763e-05, |
|
"loss": 0.5982, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3474419878482935e-05, |
|
"loss": 0.6054, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3453217669374997e-05, |
|
"loss": 0.5907, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3431997820456592e-05, |
|
"loss": 0.5915, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 1.3410760440123475e-05, |
|
"loss": 0.6059, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3389505636860944e-05, |
|
"loss": 0.5987, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3368233519243301e-05, |
|
"loss": 0.6136, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3346944195933294e-05, |
|
"loss": 0.6192, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.3325637775681561e-05, |
|
"loss": 0.6163, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 1.330431436732608e-05, |
|
"loss": 0.5846, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3282974079791603e-05, |
|
"loss": 0.6058, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3261617022089103e-05, |
|
"loss": 0.6149, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3240243303315227e-05, |
|
"loss": 0.6029, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3218853032651719e-05, |
|
"loss": 0.6001, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 1.3197446319364882e-05, |
|
"loss": 0.6152, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3176023272805008e-05, |
|
"loss": 0.6074, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3154584002405828e-05, |
|
"loss": 0.6069, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.313312861768394e-05, |
|
"loss": 0.5956, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 1.3111657228238263e-05, |
|
"loss": 0.6141, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3090169943749475e-05, |
|
"loss": 0.5944, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3068666873979443e-05, |
|
"loss": 0.6034, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3047148128770664e-05, |
|
"loss": 0.5903, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3025613818045723e-05, |
|
"loss": 0.6112, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 1.3004064051806712e-05, |
|
"loss": 0.6021, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.298249894013466e-05, |
|
"loss": 0.5939, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2960918593189005e-05, |
|
"loss": 0.6034, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2939323121206997e-05, |
|
"loss": 0.5986, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2917712634503148e-05, |
|
"loss": 0.6098, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 1.2896087243468673e-05, |
|
"loss": 0.6233, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2874447058570927e-05, |
|
"loss": 0.6034, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2852792190352821e-05, |
|
"loss": 0.6109, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2831122749432278e-05, |
|
"loss": 0.6128, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.280943884650167e-05, |
|
"loss": 0.6072, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 1.2787740592327232e-05, |
|
"loss": 0.6226, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2766028097748518e-05, |
|
"loss": 0.6147, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2744301473677814e-05, |
|
"loss": 0.6256, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2722560831099595e-05, |
|
"loss": 0.5905, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.2700806281069942e-05, |
|
"loss": 0.6026, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 1.267903793471597e-05, |
|
"loss": 0.5996, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2657255903235278e-05, |
|
"loss": 0.601, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2635460297895367e-05, |
|
"loss": 0.5935, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2613651230033085e-05, |
|
"loss": 0.5993, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 1.2591828811054034e-05, |
|
"loss": 0.61, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2569993152432028e-05, |
|
"loss": 0.6125, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2548144365708514e-05, |
|
"loss": 0.5933, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2526282562491991e-05, |
|
"loss": 0.6087, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2504407854457455e-05, |
|
"loss": 0.6106, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 1.2482520353345819e-05, |
|
"loss": 0.6147, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2460620170963353e-05, |
|
"loss": 0.6117, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2438707419181097e-05, |
|
"loss": 0.5958, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2416782209934304e-05, |
|
"loss": 0.6034, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2394844655221863e-05, |
|
"loss": 0.605, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 1.2372894867105722e-05, |
|
"loss": 0.6131, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2350932957710322e-05, |
|
"loss": 0.6034, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2328959039222026e-05, |
|
"loss": 0.5992, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2306973223888535e-05, |
|
"loss": 0.5932, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2284975624018327e-05, |
|
"loss": 0.6076, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 1.2262966351980075e-05, |
|
"loss": 0.6019, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2240945520202079e-05, |
|
"loss": 0.5968, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2218913241171691e-05, |
|
"loss": 0.5906, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2196869627434725e-05, |
|
"loss": 0.5961, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 1.2174814791594913e-05, |
|
"loss": 0.5946, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2152748846313302e-05, |
|
"loss": 0.6178, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2130671904307692e-05, |
|
"loss": 0.6132, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2108584078352048e-05, |
|
"loss": 0.6055, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2086485481275943e-05, |
|
"loss": 0.6161, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 1.2064376225963972e-05, |
|
"loss": 0.6019, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.2042256425355165e-05, |
|
"loss": 0.604, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.202012619244243e-05, |
|
"loss": 0.6063, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1997985640271956e-05, |
|
"loss": 0.6064, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1975834881942656e-05, |
|
"loss": 0.5977, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 1.1953674030605568e-05, |
|
"loss": 0.6032, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1931503199463292e-05, |
|
"loss": 0.5957, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1909322501769407e-05, |
|
"loss": 0.6105, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1887132050827893e-05, |
|
"loss": 0.601, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.186493195999255e-05, |
|
"loss": 0.6187, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 1.1842722342666421e-05, |
|
"loss": 0.61, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1820503312301218e-05, |
|
"loss": 0.6164, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1798274982396728e-05, |
|
"loss": 0.6091, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1776037466500245e-05, |
|
"loss": 0.614, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1753790878205993e-05, |
|
"loss": 0.6066, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 1.1731535331154532e-05, |
|
"loss": 0.5934, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1709270939032189e-05, |
|
"loss": 0.605, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1686997815570473e-05, |
|
"loss": 0.6142, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1664716074545498e-05, |
|
"loss": 0.5992, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 1.1642425829777391e-05, |
|
"loss": 0.6023, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1620127195129724e-05, |
|
"loss": 0.6001, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1597820284508927e-05, |
|
"loss": 0.6173, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.15755052118637e-05, |
|
"loss": 0.6026, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1553182091184439e-05, |
|
"loss": 0.6096, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 1.1530851036502655e-05, |
|
"loss": 0.6007, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1508512161890381e-05, |
|
"loss": 0.5977, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1486165581459605e-05, |
|
"loss": 0.599, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1463811409361667e-05, |
|
"loss": 0.6017, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1441449759786693e-05, |
|
"loss": 0.6141, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 1.1419080746963012e-05, |
|
"loss": 0.6186, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1396704485156552e-05, |
|
"loss": 0.6036, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1374321088670277e-05, |
|
"loss": 0.5929, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.13519306718436e-05, |
|
"loss": 0.5959, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1329533349051794e-05, |
|
"loss": 0.603, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 1.1307129234705404e-05, |
|
"loss": 0.6022, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1284718443249676e-05, |
|
"loss": 0.6055, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1262301089163958e-05, |
|
"loss": 0.5883, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1239877286961123e-05, |
|
"loss": 0.5925, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 1.1217447151186983e-05, |
|
"loss": 0.6045, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.11950107964197e-05, |
|
"loss": 0.59, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1172568337269212e-05, |
|
"loss": 0.5975, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1150119888376631e-05, |
|
"loss": 0.6234, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.112766556441367e-05, |
|
"loss": 0.6048, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 1.1105205480082052e-05, |
|
"loss": 0.6176, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1082739750112932e-05, |
|
"loss": 0.5973, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.106026848926629e-05, |
|
"loss": 0.5992, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1037791812330372e-05, |
|
"loss": 0.6074, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.1015309834121083e-05, |
|
"loss": 0.6017, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 1.099282266948141e-05, |
|
"loss": 0.6007, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0970330433280838e-05, |
|
"loss": 0.5963, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0947833240414751e-05, |
|
"loss": 0.608, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0925331205803861e-05, |
|
"loss": 0.6071, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0902824444393602e-05, |
|
"loss": 0.5891, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 1.0880313071153568e-05, |
|
"loss": 0.6158, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0857797201076897e-05, |
|
"loss": 0.601, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0835276949179707e-05, |
|
"loss": 0.5962, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0812752430500492e-05, |
|
"loss": 0.5888, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.079022376009955e-05, |
|
"loss": 0.6049, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 1.0767691053058385e-05, |
|
"loss": 0.6055, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0745154424479112e-05, |
|
"loss": 0.604, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0722613989483889e-05, |
|
"loss": 0.5954, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.0700069863214317e-05, |
|
"loss": 0.5775, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 1.067752216083085e-05, |
|
"loss": 0.5953, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0654970997512201e-05, |
|
"loss": 0.5995, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.063241648845478e-05, |
|
"loss": 0.6056, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.0609858748872073e-05, |
|
"loss": 0.6019, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.058729789399408e-05, |
|
"loss": 0.6021, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 1.05647340390667e-05, |
|
"loss": 0.6093, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0542167299351171e-05, |
|
"loss": 0.6053, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0519597790123463e-05, |
|
"loss": 0.5938, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0497025626673687e-05, |
|
"loss": 0.6094, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.047445092430552e-05, |
|
"loss": 0.5742, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 1.0451873798335605e-05, |
|
"loss": 0.5916, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0429294364092968e-05, |
|
"loss": 0.6093, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0406712736918418e-05, |
|
"loss": 0.6257, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0384129032163976e-05, |
|
"loss": 0.6083, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0361543365192275e-05, |
|
"loss": 0.5845, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 1.0338955851375962e-05, |
|
"loss": 0.6021, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0316366606097128e-05, |
|
"loss": 0.6016, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0293775744746705e-05, |
|
"loss": 0.6039, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0271183382723881e-05, |
|
"loss": 0.5969, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 1.0248589635435505e-05, |
|
"loss": 0.5928, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0225994618295507e-05, |
|
"loss": 0.5867, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0203398446724306e-05, |
|
"loss": 0.6145, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0180801236148217e-05, |
|
"loss": 0.6058, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0158203101998854e-05, |
|
"loss": 0.6098, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 1.0135604159712558e-05, |
|
"loss": 0.6101, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.01130045247298e-05, |
|
"loss": 0.6032, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.009040431249458e-05, |
|
"loss": 0.6147, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0067803638453847e-05, |
|
"loss": 0.6062, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0045202618056925e-05, |
|
"loss": 0.5942, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 1.0022601366754889e-05, |
|
"loss": 0.5874, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 1e-05, |
|
"loss": 0.6032, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.977398633245116e-06, |
|
"loss": 0.59, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.954797381943079e-06, |
|
"loss": 0.5921, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.932196361546156e-06, |
|
"loss": 0.5938, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.909595687505424e-06, |
|
"loss": 0.6053, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.886995475270205e-06, |
|
"loss": 0.5989, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.864395840287442e-06, |
|
"loss": 0.5939, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.84179689800115e-06, |
|
"loss": 0.6096, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.819198763851785e-06, |
|
"loss": 0.6114, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.796601553275697e-06, |
|
"loss": 0.6115, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.774005381704498e-06, |
|
"loss": 0.6057, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.751410364564499e-06, |
|
"loss": 0.589, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.728816617276124e-06, |
|
"loss": 0.5872, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.706224255253297e-06, |
|
"loss": 0.605, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.683633393902876e-06, |
|
"loss": 0.6136, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.661044148624038e-06, |
|
"loss": 0.5943, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.638456634807728e-06, |
|
"loss": 0.5823, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.615870967836026e-06, |
|
"loss": 0.6053, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.593287263081586e-06, |
|
"loss": 0.6025, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.570705635907038e-06, |
|
"loss": 0.6144, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.548126201664398e-06, |
|
"loss": 0.6019, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.525549075694484e-06, |
|
"loss": 0.6015, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.502974373326315e-06, |
|
"loss": 0.5999, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.48040220987654e-06, |
|
"loss": 0.597, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.45783270064883e-06, |
|
"loss": 0.6014, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.435265960933304e-06, |
|
"loss": 0.5957, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.412702106005927e-06, |
|
"loss": 0.5966, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.39014125112793e-06, |
|
"loss": 0.5986, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.367583511545225e-06, |
|
"loss": 0.603, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.3450290024878e-06, |
|
"loss": 0.6023, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.322477839169156e-06, |
|
"loss": 0.5999, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.299930136785685e-06, |
|
"loss": 0.5916, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.277386010516113e-06, |
|
"loss": 0.5867, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.25484557552089e-06, |
|
"loss": 0.6062, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.232308946941618e-06, |
|
"loss": 0.6077, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.209776239900453e-06, |
|
"loss": 0.5868, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.187247569499511e-06, |
|
"loss": 0.5915, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.164723050820298e-06, |
|
"loss": 0.6107, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.142202798923106e-06, |
|
"loss": 0.6107, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.119686928846437e-06, |
|
"loss": 0.5878, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.097175555606396e-06, |
|
"loss": 0.6061, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.074668794196142e-06, |
|
"loss": 0.5775, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.052166759585252e-06, |
|
"loss": 0.5831, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.029669566719165e-06, |
|
"loss": 0.605, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.007177330518595e-06, |
|
"loss": 0.5996, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.98469016587892e-06, |
|
"loss": 0.6113, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.962208187669633e-06, |
|
"loss": 0.6015, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.939731510733711e-06, |
|
"loss": 0.5971, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.917260249887072e-06, |
|
"loss": 0.6026, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.894794519917947e-06, |
|
"loss": 0.6022, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.872334435586333e-06, |
|
"loss": 0.6129, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.849880111623374e-06, |
|
"loss": 0.6004, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.82743166273079e-06, |
|
"loss": 0.596, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.804989203580303e-06, |
|
"loss": 0.602, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.78255284881302e-06, |
|
"loss": 0.6076, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.76012271303888e-06, |
|
"loss": 0.5994, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.737698910836045e-06, |
|
"loss": 0.6016, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.715281556750327e-06, |
|
"loss": 0.5853, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.692870765294595e-06, |
|
"loss": 0.5929, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.670466650948208e-06, |
|
"loss": 0.6093, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.648069328156403e-06, |
|
"loss": 0.5955, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.625678911329727e-06, |
|
"loss": 0.6093, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.603295514843453e-06, |
|
"loss": 0.6046, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.580919253036991e-06, |
|
"loss": 0.6037, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.558550240213309e-06, |
|
"loss": 0.6008, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.536188590638334e-06, |
|
"loss": 0.5981, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.513834418540398e-06, |
|
"loss": 0.5955, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.491487838109622e-06, |
|
"loss": 0.5799, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.46914896349735e-06, |
|
"loss": 0.6214, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.446817908815566e-06, |
|
"loss": 0.5897, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.424494788136303e-06, |
|
"loss": 0.5922, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.402179715491078e-06, |
|
"loss": 0.591, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.379872804870277e-06, |
|
"loss": 0.5879, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.357574170222612e-06, |
|
"loss": 0.5902, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.335283925454505e-06, |
|
"loss": 0.6004, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.313002184429529e-06, |
|
"loss": 0.5954, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.290729060967817e-06, |
|
"loss": 0.5996, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.268464668845471e-06, |
|
"loss": 0.5975, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.24620912179401e-06, |
|
"loss": 0.6035, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.223962533499757e-06, |
|
"loss": 0.5883, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.201725017603277e-06, |
|
"loss": 0.5916, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.179496687698785e-06, |
|
"loss": 0.6078, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.157277657333582e-06, |
|
"loss": 0.6136, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.135068040007452e-06, |
|
"loss": 0.5809, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.11286794917211e-06, |
|
"loss": 0.5955, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.090677498230598e-06, |
|
"loss": 0.5925, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.068496800536712e-06, |
|
"loss": 0.6077, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.046325969394437e-06, |
|
"loss": 0.6026, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.024165118057344e-06, |
|
"loss": 0.6027, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.002014359728046e-06, |
|
"loss": 0.5894, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.97987380755757e-06, |
|
"loss": 0.5863, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.957743574644837e-06, |
|
"loss": 0.6012, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.93562377403603e-06, |
|
"loss": 0.5972, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.913514518724059e-06, |
|
"loss": 0.5873, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.891415921647957e-06, |
|
"loss": 0.6016, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.869328095692313e-06, |
|
"loss": 0.5999, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.847251153686701e-06, |
|
"loss": 0.6118, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.825185208405089e-06, |
|
"loss": 0.5816, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.803130372565277e-06, |
|
"loss": 0.5904, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.781086758828314e-06, |
|
"loss": 0.5886, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.759054479797924e-06, |
|
"loss": 0.5952, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.73703364801993e-06, |
|
"loss": 0.586, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.715024375981676e-06, |
|
"loss": 0.5892, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.69302677611147e-06, |
|
"loss": 0.5931, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.671040960777977e-06, |
|
"loss": 0.5866, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.649067042289681e-06, |
|
"loss": 0.5848, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.627105132894279e-06, |
|
"loss": 0.5978, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.6051553447781415e-06, |
|
"loss": 0.5903, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.583217790065697e-06, |
|
"loss": 0.588, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.561292580818906e-06, |
|
"loss": 0.5977, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.539379829036652e-06, |
|
"loss": 0.5761, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.517479646654184e-06, |
|
"loss": 0.5964, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.4955921455425505e-06, |
|
"loss": 0.6127, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.47371743750801e-06, |
|
"loss": 0.5961, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.451855634291488e-06, |
|
"loss": 0.6008, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.430006847567972e-06, |
|
"loss": 0.5684, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.4081711889459695e-06, |
|
"loss": 0.599, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.386348769966918e-06, |
|
"loss": 0.589, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.364539702104635e-06, |
|
"loss": 0.5924, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.342744096764727e-06, |
|
"loss": 0.5979, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.320962065284032e-06, |
|
"loss": 0.5818, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.299193718930062e-06, |
|
"loss": 0.6101, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.277439168900404e-06, |
|
"loss": 0.6078, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.255698526322188e-06, |
|
"loss": 0.5949, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.233971902251483e-06, |
|
"loss": 0.5991, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.2122594076727705e-06, |
|
"loss": 0.5969, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.190561153498334e-06, |
|
"loss": 0.5978, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.1688772505677225e-06, |
|
"loss": 0.5922, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.147207809647183e-06, |
|
"loss": 0.602, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.125552941429077e-06, |
|
"loss": 0.5937, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.1039127565313285e-06, |
|
"loss": 0.5933, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.082287365496852e-06, |
|
"loss": 0.5831, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.060676878793007e-06, |
|
"loss": 0.6017, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.0390814068109965e-06, |
|
"loss": 0.622, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.0175010598653414e-06, |
|
"loss": 0.5848, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.995935948193294e-06, |
|
"loss": 0.5819, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.9743861819542794e-06, |
|
"loss": 0.5769, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.9528518712293405e-06, |
|
"loss": 0.5951, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.931333126020561e-06, |
|
"loss": 0.593, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.909830056250527e-06, |
|
"loss": 0.5998, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.888342771761737e-06, |
|
"loss": 0.5901, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.866871382316063e-06, |
|
"loss": 0.5963, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.8454159975941725e-06, |
|
"loss": 0.5919, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.823976727194994e-06, |
|
"loss": 0.5855, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.8025536806351235e-06, |
|
"loss": 0.5908, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.781146967348283e-06, |
|
"loss": 0.5841, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.759756696684776e-06, |
|
"loss": 0.5977, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.738382977910898e-06, |
|
"loss": 0.5929, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.7170259202084e-06, |
|
"loss": 0.5824, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.695685632673919e-06, |
|
"loss": 0.593, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.6743622243184405e-06, |
|
"loss": 0.597, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.653055804066712e-06, |
|
"loss": 0.5984, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.631766480756703e-06, |
|
"loss": 0.5943, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.6104943631390596e-06, |
|
"loss": 0.5978, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.589239559876525e-06, |
|
"loss": 0.6056, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.568002179543409e-06, |
|
"loss": 0.5996, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.546782330625004e-06, |
|
"loss": 0.6081, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.525580121517069e-06, |
|
"loss": 0.5832, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.504395660525236e-06, |
|
"loss": 0.6012, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.48322905586449e-06, |
|
"loss": 0.598, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.462080415658591e-06, |
|
"loss": 0.6086, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.440949847939538e-06, |
|
"loss": 0.5897, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.419837460647014e-06, |
|
"loss": 0.5846, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.39874336162782e-06, |
|
"loss": 0.5828, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.377667658635358e-06, |
|
"loss": 0.5882, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.356610459329038e-06, |
|
"loss": 0.6113, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.335571871273762e-06, |
|
"loss": 0.5926, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.314552001939351e-06, |
|
"loss": 0.5963, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.293550958700021e-06, |
|
"loss": 0.5915, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.272568848833809e-06, |
|
"loss": 0.594, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.251605779522032e-06, |
|
"loss": 0.5974, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.230661857848759e-06, |
|
"loss": 0.5963, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.209737190800235e-06, |
|
"loss": 0.5841, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.188831885264357e-06, |
|
"loss": 0.591, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.167946048030107e-06, |
|
"loss": 0.5989, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.147079785787038e-06, |
|
"loss": 0.6103, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.126233205124692e-06, |
|
"loss": 0.5967, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.105406412532078e-06, |
|
"loss": 0.5847, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.084599514397127e-06, |
|
"loss": 0.6069, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.06381261700614e-06, |
|
"loss": 0.5735, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.043045826543254e-06, |
|
"loss": 0.5941, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.022299249089889e-06, |
|
"loss": 0.5964, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 6.001572990624222e-06, |
|
"loss": 0.597, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.980867157020624e-06, |
|
"loss": 0.6017, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.960181854049146e-06, |
|
"loss": 0.6045, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.93951718737495e-06, |
|
"loss": 0.5989, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.918873262557785e-06, |
|
"loss": 0.5891, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.8982501850514614e-06, |
|
"loss": 0.6061, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.8776480602032715e-06, |
|
"loss": 0.5981, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.857066993253501e-06, |
|
"loss": 0.5809, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.836507089334849e-06, |
|
"loss": 0.6019, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.815968453471923e-06, |
|
"loss": 0.5965, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.79545119058068e-06, |
|
"loss": 0.5957, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.7749554054679015e-06, |
|
"loss": 0.6035, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.754481202830653e-06, |
|
"loss": 0.6129, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.7340286872557515e-06, |
|
"loss": 0.5987, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.713597963219243e-06, |
|
"loss": 0.5864, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.693189135085839e-06, |
|
"loss": 0.5903, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.672802307108423e-06, |
|
"loss": 0.5963, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.652437583427478e-06, |
|
"loss": 0.5818, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.6320950680705826e-06, |
|
"loss": 0.5899, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.6117748649518665e-06, |
|
"loss": 0.5981, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.591477077871485e-06, |
|
"loss": 0.5913, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.5712018105150914e-06, |
|
"loss": 0.5929, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.550949166453293e-06, |
|
"loss": 0.5835, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.530719249141148e-06, |
|
"loss": 0.6026, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.5105121619176064e-06, |
|
"loss": 0.5827, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.490328008005002e-06, |
|
"loss": 0.5979, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.470166890508519e-06, |
|
"loss": 0.5955, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.450028912415672e-06, |
|
"loss": 0.5694, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.429914176595772e-06, |
|
"loss": 0.6043, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.409822785799393e-06, |
|
"loss": 0.5892, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.389754842657875e-06, |
|
"loss": 0.5941, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.369710449682767e-06, |
|
"loss": 0.5915, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.3496897092653335e-06, |
|
"loss": 0.587, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.329692723675994e-06, |
|
"loss": 0.5782, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.309719595063848e-06, |
|
"loss": 0.581, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.289770425456109e-06, |
|
"loss": 0.5407, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.2698453167576155e-06, |
|
"loss": 0.4766, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.249944370750293e-06, |
|
"loss": 0.4872, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.230067689092629e-06, |
|
"loss": 0.4779, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.210215373319183e-06, |
|
"loss": 0.4726, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.190387524840033e-06, |
|
"loss": 0.459, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.170584244940275e-06, |
|
"loss": 0.4846, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.150805634779505e-06, |
|
"loss": 0.4659, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.131051795391302e-06, |
|
"loss": 0.4615, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.111322827682706e-06, |
|
"loss": 0.4612, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.091618832433716e-06, |
|
"loss": 0.4593, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.0719399102967536e-06, |
|
"loss": 0.4594, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.0522861617961694e-06, |
|
"loss": 0.4552, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.03265768732772e-06, |
|
"loss": 0.4732, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.0130545871580504e-06, |
|
"loss": 0.4634, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.993476961424203e-06, |
|
"loss": 0.474, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.973924910133071e-06, |
|
"loss": 0.4566, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.95439853316093e-06, |
|
"loss": 0.4806, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.934897930252887e-06, |
|
"loss": 0.4489, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.915423201022396e-06, |
|
"loss": 0.4648, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.895974444950743e-06, |
|
"loss": 0.4698, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.876551761386532e-06, |
|
"loss": 0.4633, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.857155249545197e-06, |
|
"loss": 0.4625, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.837785008508462e-06, |
|
"loss": 0.4707, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.8184411372238724e-06, |
|
"loss": 0.4653, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.799123734504258e-06, |
|
"loss": 0.4565, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.779832899027243e-06, |
|
"loss": 0.4661, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.7605687293347425e-06, |
|
"loss": 0.4633, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.7413313238324556e-06, |
|
"loss": 0.4689, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.722120780789369e-06, |
|
"loss": 0.4651, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.702937198337241e-06, |
|
"loss": 0.4652, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.68378067447012e-06, |
|
"loss": 0.4551, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.66465130704382e-06, |
|
"loss": 0.4777, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.645549193775452e-06, |
|
"loss": 0.4744, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.626474432242879e-06, |
|
"loss": 0.4507, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.607427119884276e-06, |
|
"loss": 0.4726, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.58840735399758e-06, |
|
"loss": 0.4493, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.569415231740021e-06, |
|
"loss": 0.4676, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.550450850127626e-06, |
|
"loss": 0.4486, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.531514306034701e-06, |
|
"loss": 0.4674, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.512605696193371e-06, |
|
"loss": 0.4499, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.493725117193051e-06, |
|
"loss": 0.4651, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.474872665479974e-06, |
|
"loss": 0.4837, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.4560484373566945e-06, |
|
"loss": 0.4676, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.437252528981586e-06, |
|
"loss": 0.4705, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.4184850363683726e-06, |
|
"loss": 0.4597, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3997460553856095e-06, |
|
"loss": 0.4717, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.381035681756223e-06, |
|
"loss": 0.4649, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3623540110569935e-06, |
|
"loss": 0.4632, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.343701138718086e-06, |
|
"loss": 0.4819, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3250771600225536e-06, |
|
"loss": 0.4614, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.306482170105864e-06, |
|
"loss": 0.4492, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.2879162639553925e-06, |
|
"loss": 0.4657, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.26937953640995e-06, |
|
"loss": 0.4648, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.250872082159305e-06, |
|
"loss": 0.4628, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.232393995743682e-06, |
|
"loss": 0.4597, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.213945371553292e-06, |
|
"loss": 0.4459, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.195526303827842e-06, |
|
"loss": 0.4814, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.177136886656067e-06, |
|
"loss": 0.4537, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.158777213975231e-06, |
|
"loss": 0.4669, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.140447379570663e-06, |
|
"loss": 0.4706, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.12214747707527e-06, |
|
"loss": 0.473, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.103877599969056e-06, |
|
"loss": 0.4625, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.085637841578652e-06, |
|
"loss": 0.4752, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.067428295076833e-06, |
|
"loss": 0.455, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.0492490534820525e-06, |
|
"loss": 0.4531, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.0311002096579486e-06, |
|
"loss": 0.4553, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.012981856312893e-06, |
|
"loss": 0.475, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.9948940859994964e-06, |
|
"loss": 0.4573, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.976836991114146e-06, |
|
"loss": 0.4669, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.958810663896531e-06, |
|
"loss": 0.4573, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.9408151964291764e-06, |
|
"loss": 0.4502, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.922850680636968e-06, |
|
"loss": 0.4623, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.9049172082866786e-06, |
|
"loss": 0.4722, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.8870148709865115e-06, |
|
"loss": 0.4664, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.869143760185618e-06, |
|
"loss": 0.4625, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.851303967173647e-06, |
|
"loss": 0.4522, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.833495583080261e-06, |
|
"loss": 0.4453, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.815718698874672e-06, |
|
"loss": 0.4543, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7979734053652028e-06, |
|
"loss": 0.4543, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.780259793198784e-06, |
|
"loss": 0.4593, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.762577952860529e-06, |
|
"loss": 0.4818, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.744927974673237e-06, |
|
"loss": 0.4663, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.727309948796963e-06, |
|
"loss": 0.4588, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.709723965228531e-06, |
|
"loss": 0.472, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.69217011380109e-06, |
|
"loss": 0.4601, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6746484841836516e-06, |
|
"loss": 0.4524, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.657159165880626e-06, |
|
"loss": 0.4693, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6397022482313804e-06, |
|
"loss": 0.4621, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6222778204097607e-06, |
|
"loss": 0.4692, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6048859714236597e-06, |
|
"loss": 0.4701, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5875267901145383e-06, |
|
"loss": 0.4768, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5702003651569883e-06, |
|
"loss": 0.4762, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.552906785058278e-06, |
|
"loss": 0.465, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.5356461381578865e-06, |
|
"loss": 0.459, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.51841851262708e-06, |
|
"loss": 0.4612, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.501223996468426e-06, |
|
"loss": 0.4577, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.484062677515376e-06, |
|
"loss": 0.4672, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.466934643431795e-06, |
|
"loss": 0.4577, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.4498399817115213e-06, |
|
"loss": 0.4587, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.432778779677921e-06, |
|
"loss": 0.4594, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.415751124483445e-06, |
|
"loss": 0.4596, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.3987571031091735e-06, |
|
"loss": 0.4659, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3817968023643766e-06, |
|
"loss": 0.4658, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.36487030888608e-06, |
|
"loss": 0.4734, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3479777091386088e-06, |
|
"loss": 0.4502, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3311190894131495e-06, |
|
"loss": 0.4611, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3142945358273114e-06, |
|
"loss": 0.4559, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2975041343246937e-06, |
|
"loss": 0.4605, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.280747970674427e-06, |
|
"loss": 0.4515, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.264026130470762e-06, |
|
"loss": 0.4735, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.247338699132604e-06, |
|
"loss": 0.4781, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.230685761903094e-06, |
|
"loss": 0.4597, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.214067403849179e-06, |
|
"loss": 0.464, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1974837098611487e-06, |
|
"loss": 0.4644, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1809347646522414e-06, |
|
"loss": 0.4657, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1644206527581734e-06, |
|
"loss": 0.4614, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1479414585367396e-06, |
|
"loss": 0.4576, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.1314972661673572e-06, |
|
"loss": 0.4588, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.115088159650648e-06, |
|
"loss": 0.4618, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.0987142228080137e-06, |
|
"loss": 0.4598, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.082375539281194e-06, |
|
"loss": 0.4643, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0660721925318483e-06, |
|
"loss": 0.4618, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0498042658411276e-06, |
|
"loss": 0.4742, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0335718423092553e-06, |
|
"loss": 0.4602, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0173750048550866e-06, |
|
"loss": 0.4555, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0012138362157062e-06, |
|
"loss": 0.4655, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.985088418945986e-06, |
|
"loss": 0.4642, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9689988354181742e-06, |
|
"loss": 0.4665, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9529451678214726e-06, |
|
"loss": 0.4727, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9369274981616137e-06, |
|
"loss": 0.4674, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.920945908260453e-06, |
|
"loss": 0.4588, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.905000479755531e-06, |
|
"loss": 0.4619, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8890912940996784e-06, |
|
"loss": 0.4739, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8732184325605815e-06, |
|
"loss": 0.472, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.857381976220379e-06, |
|
"loss": 0.4505, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8415820059752397e-06, |
|
"loss": 0.4853, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8258186025349543e-06, |
|
"loss": 0.4583, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8100918464225304e-06, |
|
"loss": 0.4633, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7944018179737576e-06, |
|
"loss": 0.4623, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7787485973368288e-06, |
|
"loss": 0.4528, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.763132264471903e-06, |
|
"loss": 0.464, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7475528991507106e-06, |
|
"loss": 0.4699, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7320105809561415e-06, |
|
"loss": 0.459, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7165053892818495e-06, |
|
"loss": 0.4715, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.701037403331829e-06, |
|
"loss": 0.4609, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.685606702120019e-06, |
|
"loss": 0.4597, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.670213364469908e-06, |
|
"loss": 0.4585, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.654857469014113e-06, |
|
"loss": 0.4775, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6395390941940002e-06, |
|
"loss": 0.4768, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.624258318259253e-06, |
|
"loss": 0.4611, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.609015219267511e-06, |
|
"loss": 0.4683, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5938098750839414e-06, |
|
"loss": 0.4576, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5786423633808487e-06, |
|
"loss": 0.4609, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.563512761637291e-06, |
|
"loss": 0.4602, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5484211471386633e-06, |
|
"loss": 0.4702, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5333675969763215e-06, |
|
"loss": 0.4559, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.518352188047176e-06, |
|
"loss": 0.4603, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5033749970533015e-06, |
|
"loss": 0.4501, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.488436100501549e-06, |
|
"loss": 0.4582, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.4735355747031566e-06, |
|
"loss": 0.4747, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4586734957733495e-06, |
|
"loss": 0.4564, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.443849939630959e-06, |
|
"loss": 0.462, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4290649819980404e-06, |
|
"loss": 0.4852, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4143186983994715e-06, |
|
"loss": 0.4698, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3996111641625762e-06, |
|
"loss": 0.4694, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.384942454416734e-06, |
|
"loss": 0.4537, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3703126440930137e-06, |
|
"loss": 0.4706, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3557218079237608e-06, |
|
"loss": 0.4775, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3411700204422437e-06, |
|
"loss": 0.4642, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3266573559822568e-06, |
|
"loss": 0.449, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3121838886777437e-06, |
|
"loss": 0.4571, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.2977496924624223e-06, |
|
"loss": 0.4673, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.283354841069403e-06, |
|
"loss": 0.4612, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.26899940803082e-06, |
|
"loss": 0.4525, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2546834666774397e-06, |
|
"loss": 0.453, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.240407090138309e-06, |
|
"loss": 0.4704, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.226170351340358e-06, |
|
"loss": 0.4627, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.211973323008041e-06, |
|
"loss": 0.4606, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.197816077662965e-06, |
|
"loss": 0.4636, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.183698687623511e-06, |
|
"loss": 0.4595, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.16962122500448e-06, |
|
"loss": 0.4706, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.155583761716703e-06, |
|
"loss": 0.4493, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1415863694666973e-06, |
|
"loss": 0.4553, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1276291197562772e-06, |
|
"loss": 0.4491, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.1137120838822124e-06, |
|
"loss": 0.4658, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0998353329358355e-06, |
|
"loss": 0.4554, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0859989378027033e-06, |
|
"loss": 0.4621, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.072202969162234e-06, |
|
"loss": 0.4668, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0584474974873204e-06, |
|
"loss": 0.4558, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0447325930440043e-06, |
|
"loss": 0.4606, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.03105832589109e-06, |
|
"loss": 0.4583, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0174247658798054e-06, |
|
"loss": 0.4545, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0038319826534312e-06, |
|
"loss": 0.4672, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.990280045646954e-06, |
|
"loss": 0.4522, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9767690240867054e-06, |
|
"loss": 0.4497, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9632989869900145e-06, |
|
"loss": 0.4715, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9498700031648554e-06, |
|
"loss": 0.4499, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.936482141209486e-06, |
|
"loss": 0.4789, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.923135469512113e-06, |
|
"loss": 0.4622, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9098300562505266e-06, |
|
"loss": 0.4555, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8965659693917626e-06, |
|
"loss": 0.4666, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8833432766917514e-06, |
|
"loss": 0.4514, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.870162045694971e-06, |
|
"loss": 0.4603, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8570223437341118e-06, |
|
"loss": 0.4645, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8439242379297129e-06, |
|
"loss": 0.453, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8308677951898435e-06, |
|
"loss": 0.4617, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8178530822097373e-06, |
|
"loss": 0.4675, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.8048801654714687e-06, |
|
"loss": 0.4485, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7919491112436038e-06, |
|
"loss": 0.4612, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7790599855808732e-06, |
|
"loss": 0.4546, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.76621285432382e-06, |
|
"loss": 0.4599, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7534077830984697e-06, |
|
"loss": 0.4511, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7406448373160024e-06, |
|
"loss": 0.4538, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7279240821724063e-06, |
|
"loss": 0.4675, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7152455826481529e-06, |
|
"loss": 0.4618, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7026094035078589e-06, |
|
"loss": 0.4559, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.6900156092999664e-06, |
|
"loss": 0.4544, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6774642643563955e-06, |
|
"loss": 0.4581, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.664955432792239e-06, |
|
"loss": 0.4593, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6524891785054097e-06, |
|
"loss": 0.4415, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6400655651763308e-06, |
|
"loss": 0.4629, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6276846562676085e-06, |
|
"loss": 0.4665, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.615346515023698e-06, |
|
"loss": 0.4638, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.603051204470597e-06, |
|
"loss": 0.4633, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5907987874155074e-06, |
|
"loss": 0.4696, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5785893264465257e-06, |
|
"loss": 0.4519, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.566422883932317e-06, |
|
"loss": 0.4716, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5542995220217961e-06, |
|
"loss": 0.4656, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5422193026438225e-06, |
|
"loss": 0.4593, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.530182287506855e-06, |
|
"loss": 0.4595, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.518188538098677e-06, |
|
"loss": 0.468, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.506238115686044e-06, |
|
"loss": 0.451, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4943310813144006e-06, |
|
"loss": 0.4646, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4824674958075436e-06, |
|
"loss": 0.4664, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4706474197673336e-06, |
|
"loss": 0.4614, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.458870913573368e-06, |
|
"loss": 0.4599, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4471380373826826e-06, |
|
"loss": 0.4593, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4354488511294418e-06, |
|
"loss": 0.4598, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4238034145246271e-06, |
|
"loss": 0.4514, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4122017870557458e-06, |
|
"loss": 0.4718, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4006440279865085e-06, |
|
"loss": 0.444, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3891301963565473e-06, |
|
"loss": 0.4586, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3776603509810938e-06, |
|
"loss": 0.4647, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3662345504506903e-06, |
|
"loss": 0.4564, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3548528531308892e-06, |
|
"loss": 0.4574, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.343515317161952e-06, |
|
"loss": 0.4552, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3322220004585596e-06, |
|
"loss": 0.4638, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3209729607095022e-06, |
|
"loss": 0.4593, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3097682553774028e-06, |
|
"loss": 0.4537, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.2986079416984088e-06, |
|
"loss": 0.4583, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2874920766819032e-06, |
|
"loss": 0.4628, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2764207171102206e-06, |
|
"loss": 0.4795, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2653939195383448e-06, |
|
"loss": 0.4731, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2544117402936373e-06, |
|
"loss": 0.4616, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.243474235475528e-06, |
|
"loss": 0.4581, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.232581460955249e-06, |
|
"loss": 0.4685, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2217334723755348e-06, |
|
"loss": 0.4544, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2109303251503434e-06, |
|
"loss": 0.4596, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2001720744645729e-06, |
|
"loss": 0.4472, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.189458775273784e-06, |
|
"loss": 0.4597, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.178790482303912e-06, |
|
"loss": 0.4578, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1681672500509866e-06, |
|
"loss": 0.4556, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1575891327808664e-06, |
|
"loss": 0.4628, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.147056184528943e-06, |
|
"loss": 0.4535, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1365684590998837e-06, |
|
"loss": 0.4616, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1261260100673355e-06, |
|
"loss": 0.4566, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1157288907736762e-06, |
|
"loss": 0.4586, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1053771543297198e-06, |
|
"loss": 0.4507, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0950708536144616e-06, |
|
"loss": 0.4499, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0848100412747954e-06, |
|
"loss": 0.4681, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0745947697252512e-06, |
|
"loss": 0.448, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0644250911477306e-06, |
|
"loss": 0.4619, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0543010574912305e-06, |
|
"loss": 0.4632, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0442227204715872e-06, |
|
"loss": 0.4474, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0341901315712044e-06, |
|
"loss": 0.4454, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0242033420388008e-06, |
|
"loss": 0.4503, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0142624028891334e-06, |
|
"loss": 0.4584, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0043673649027519e-06, |
|
"loss": 0.4472, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.945182786257301e-07, |
|
"loss": 0.4555, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.8471519436941e-07, |
|
"loss": 0.4651, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.749581622101424e-07, |
|
"loss": 0.4691, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.652472319890372e-07, |
|
"loss": 0.4531, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.555824533117064e-07, |
|
"loss": 0.4599, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.459638755480038e-07, |
|
"loss": 0.4692, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.363915478317875e-07, |
|
"loss": 0.4682, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.268655190606501e-07, |
|
"loss": 0.4654, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.173858378956856e-07, |
|
"loss": 0.4594, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.079525527612321e-07, |
|
"loss": 0.4478, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 8.985657118446223e-07, |
|
"loss": 0.4542, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.892253630959502e-07, |
|
"loss": 0.4608, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.799315542278075e-07, |
|
"loss": 0.4559, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.706843327150605e-07, |
|
"loss": 0.4527, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.614837457945868e-07, |
|
"loss": 0.444, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.523298404650504e-07, |
|
"loss": 0.4639, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.432226634866514e-07, |
|
"loss": 0.4735, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.34162261380892e-07, |
|
"loss": 0.4589, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.25148680430341e-07, |
|
"loss": 0.4547, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.161819666783888e-07, |
|
"loss": 0.467, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.072621659290247e-07, |
|
"loss": 0.4687, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.983893237465878e-07, |
|
"loss": 0.4652, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.895634854555512e-07, |
|
"loss": 0.4749, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.807846961402699e-07, |
|
"loss": 0.4602, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.720530006447735e-07, |
|
"loss": 0.4494, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.633684435725208e-07, |
|
"loss": 0.4673, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.547310692861754e-07, |
|
"loss": 0.4513, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.461409219073857e-07, |
|
"loss": 0.4639, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.375980453165487e-07, |
|
"loss": 0.4428, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.291024831525961e-07, |
|
"loss": 0.449, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.206542788127646e-07, |
|
"loss": 0.4517, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.122534754523768e-07, |
|
"loss": 0.4677, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.039001159846215e-07, |
|
"loss": 0.463, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.955942430803298e-07, |
|
"loss": 0.4557, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.87335899167767e-07, |
|
"loss": 0.4549, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.791251264324039e-07, |
|
"loss": 0.4548, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.709619668167122e-07, |
|
"loss": 0.4623, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.628464620199404e-07, |
|
"loss": 0.4723, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.547786534979083e-07, |
|
"loss": 0.4604, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.467585824627886e-07, |
|
"loss": 0.453, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.387862898829078e-07, |
|
"loss": 0.4812, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.30861816482522e-07, |
|
"loss": 0.473, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.229852027416183e-07, |
|
"loss": 0.4504, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.151564888957084e-07, |
|
"loss": 0.4708, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.073757149356185e-07, |
|
"loss": 0.4568, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.996429206072874e-07, |
|
"loss": 0.4619, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.919581454115598e-07, |
|
"loss": 0.4639, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.843214286039956e-07, |
|
"loss": 0.4609, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.767328091946544e-07, |
|
"loss": 0.459, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.691923259479093e-07, |
|
"loss": 0.4564, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.617000173822385e-07, |
|
"loss": 0.4551, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.542559217700339e-07, |
|
"loss": 0.4646, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.468600771374077e-07, |
|
"loss": 0.4526, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.395125212639895e-07, |
|
"loss": 0.4547, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.322132916827483e-07, |
|
"loss": 0.4763, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.249624256797803e-07, |
|
"loss": 0.4526, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.177599602941407e-07, |
|
"loss": 0.4529, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.106059323176371e-07, |
|
"loss": 0.4486, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.035003782946468e-07, |
|
"loss": 0.4522, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.964433345219354e-07, |
|
"loss": 0.4526, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.894348370484648e-07, |
|
"loss": 0.4654, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.824749216752134e-07, |
|
"loss": 0.4497, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.755636239549877e-07, |
|
"loss": 0.4552, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6870097919224923e-07, |
|
"loss": 0.4707, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6188702244292614e-07, |
|
"loss": 0.4647, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.551217885142378e-07, |
|
"loss": 0.4617, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.484053119645182e-07, |
|
"loss": 0.4645, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.417376271030327e-07, |
|
"loss": 0.4516, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.3511876798981387e-07, |
|
"loss": 0.4497, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.285487684354772e-07, |
|
"loss": 0.4449, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.220276620010566e-07, |
|
"loss": 0.4829, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.1555548199782357e-07, |
|
"loss": 0.4744, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.091322614871274e-07, |
|
"loss": 0.448, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.0275803328021946e-07, |
|
"loss": 0.4688, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.96432829938086e-07, |
|
"loss": 0.4641, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.9015668377128446e-07, |
|
"loss": 0.4588, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.839296268397774e-07, |
|
"loss": 0.4381, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.777516909527701e-07, |
|
"loss": 0.4575, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.7162290766854293e-07, |
|
"loss": 0.4515, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.6554330829429716e-07, |
|
"loss": 0.4425, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.5951292388598956e-07, |
|
"loss": 0.4534, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.5353178524817566e-07, |
|
"loss": 0.4705, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.4759992293385446e-07, |
|
"loss": 0.4558, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.417173672443075e-07, |
|
"loss": 0.4688, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.3588414822895097e-07, |
|
"loss": 0.4524, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.30100295685174e-07, |
|
"loss": 0.4656, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.24365839158195e-07, |
|
"loss": 0.46, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.1868080794090316e-07, |
|
"loss": 0.4479, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.130452310737131e-07, |
|
"loss": 0.4739, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.0745913734441357e-07, |
|
"loss": 0.4518, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.0192255528802894e-07, |
|
"loss": 0.4623, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.9643551318665917e-07, |
|
"loss": 0.4586, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.9099803906934567e-07, |
|
"loss": 0.4621, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.8561016071192884e-07, |
|
"loss": 0.4563, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.8027190563689745e-07, |
|
"loss": 0.4611, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.7498330111325635e-07, |
|
"loss": 0.4539, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.697443741563832e-07, |
|
"loss": 0.4505, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.6455515152789435e-07, |
|
"loss": 0.4656, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.594156597355002e-07, |
|
"loss": 0.4494, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.5432592503288e-07, |
|
"loss": 0.4502, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.492859734195385e-07, |
|
"loss": 0.4638, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.442958306406795e-07, |
|
"loss": 0.4523, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.393555221870747e-07, |
|
"loss": 0.4595, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3446507329492274e-07, |
|
"loss": 0.4462, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2962450894573606e-07, |
|
"loss": 0.456, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2483385386620317e-07, |
|
"loss": 0.4488, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.200931325280664e-07, |
|
"loss": 0.451, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.1540236914799116e-07, |
|
"loss": 0.4614, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.1076158768744914e-07, |
|
"loss": 0.453, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0617081185259512e-07, |
|
"loss": 0.462, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0163006509414052e-07, |
|
"loss": 0.4656, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9713937060723887e-07, |
|
"loss": 0.4484, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9269875133136384e-07, |
|
"loss": 0.4609, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8830822995019482e-07, |
|
"loss": 0.4604, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8396782889150144e-07, |
|
"loss": 0.4554, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.7967757032702481e-07, |
|
"loss": 0.4474, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.7543747617236873e-07, |
|
"loss": 0.4784, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.7124756808688525e-07, |
|
"loss": 0.4452, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.6710786747356377e-07, |
|
"loss": 0.4469, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.630183954789233e-07, |
|
"loss": 0.4648, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.5897917299290578e-07, |
|
"loss": 0.457, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.5499022064876412e-07, |
|
"loss": 0.4519, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.510515588229633e-07, |
|
"loss": 0.443, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4716320763507152e-07, |
|
"loss": 0.4552, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4332518694765708e-07, |
|
"loss": 0.4583, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3953751636619162e-07, |
|
"loss": 0.4643, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3580021523894482e-07, |
|
"loss": 0.458, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3211330265689104e-07, |
|
"loss": 0.4647, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2847679745360607e-07, |
|
"loss": 0.4541, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2489071820517394e-07, |
|
"loss": 0.4558, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2135508323009248e-07, |
|
"loss": 0.4604, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1786991058917785e-07, |
|
"loss": 0.4677, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1443521808547353e-07, |
|
"loss": 0.4647, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1105102326415929e-07, |
|
"loss": 0.4513, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.0771734341246121e-07, |
|
"loss": 0.4676, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0443419555956402e-07, |
|
"loss": 0.4541, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.012015964765245e-07, |
|
"loss": 0.4596, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.801956267618262e-08, |
|
"loss": 0.4609, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.488811041308166e-08, |
|
"loss": 0.4707, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.180725568338045e-08, |
|
"loss": 0.4561, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.87770142247768e-08, |
|
"loss": 0.455, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.579740151642534e-08, |
|
"loss": 0.4638, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.286843277885426e-08, |
|
"loss": 0.4616, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.999012297389419e-08, |
|
"loss": 0.4496, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.716248680459726e-08, |
|
"loss": 0.4612, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.438553871516152e-08, |
|
"loss": 0.4487, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.165929289086327e-08, |
|
"loss": 0.4514, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.898376325797596e-08, |
|
"loss": 0.4512, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.635896348370475e-08, |
|
"loss": 0.4475, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.378490697611761e-08, |
|
"loss": 0.4458, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.126160688407101e-08, |
|
"loss": 0.4643, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.878907609714879e-08, |
|
"loss": 0.4519, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.636732724559113e-08, |
|
"loss": 0.4522, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.399637270023683e-08, |
|
"loss": 0.4511, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.1676224572452246e-08, |
|
"loss": 0.4476, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.940689471407356e-08, |
|
"loss": 0.456, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.718839471734904e-08, |
|
"loss": 0.4658, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.502073591487244e-08, |
|
"loss": 0.4496, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.2903929379530806e-08, |
|
"loss": 0.4732, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.083798592444899e-08, |
|
"loss": 0.4596, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.882291610292854e-08, |
|
"loss": 0.4617, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.68587302083967e-08, |
|
"loss": 0.4521, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.494543827435748e-08, |
|
"loss": 0.4585, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.308305007433399e-08, |
|
"loss": 0.461, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.127157512182288e-08, |
|
"loss": 0.4633, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9511022670246635e-08, |
|
"loss": 0.4525, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.7801401712900245e-08, |
|
"loss": 0.4577, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.6142720982911264e-08, |
|
"loss": 0.456, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.4534988953192063e-08, |
|
"loss": 0.4497, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.2978213836400974e-08, |
|
"loss": 0.4636, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.1472403584892332e-08, |
|
"loss": 0.4539, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.0017565890683154e-08, |
|
"loss": 0.4501, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.8613708185410973e-08, |
|
"loss": 0.4478, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.726083764029607e-08, |
|
"loss": 0.4736, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.5958961166104847e-08, |
|
"loss": 0.4488, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4708085413113194e-08, |
|
"loss": 0.4536, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.3508216771076499e-08, |
|
"loss": 0.4558, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.2359361369190804e-08, |
|
"loss": 0.4454, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1261525076069479e-08, |
|
"loss": 0.4575, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.0214713499706596e-08, |
|
"loss": 0.4536, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.218931987448055e-09, |
|
"loss": 0.4564, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.274185625971598e-09, |
|
"loss": 0.4506, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.380479241253513e-09, |
|
"loss": 0.4608, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.5378173985441994e-09, |
|
"loss": 0.4483, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.7462044023515186e-09, |
|
"loss": 0.4458, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.0056442964119265e-09, |
|
"loss": 0.4673, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.316140863671603e-09, |
|
"loss": 0.4558, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.6776976262697937e-09, |
|
"loss": 0.4627, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.0903178455166106e-09, |
|
"loss": 0.4618, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.5540045218819256e-09, |
|
"loss": 0.4483, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.0687603949720583e-09, |
|
"loss": 0.4695, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.6345879435231138e-09, |
|
"loss": 0.4565, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.2514893853865506e-09, |
|
"loss": 0.4595, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 9.194666775158567e-10, |
|
"loss": 0.4614, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 6.385215159565583e-10, |
|
"loss": 0.4693, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.086553358395584e-10, |
|
"loss": 0.4618, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.2986931137447544e-10, |
|
"loss": 0.4639, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.0216435583743079e-10, |
|
"loss": 0.4455, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 2.5541121576599937e-11, |
|
"loss": 0.4644, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.4231, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"step": 1434, |
|
"total_flos": 2552648562245632.0, |
|
"train_loss": 0.6098201287549244, |
|
"train_runtime": 11372.0534, |
|
"train_samples_per_second": 16.124, |
|
"train_steps_per_second": 0.126 |
|
} |
|
], |
|
"max_steps": 1434, |
|
"num_train_epochs": 3, |
|
"total_flos": 2552648562245632.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |