{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 1.596169193934557, |
|
"eval_steps": 500, |
|
"global_step": 2000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.6315789473684213e-07, |
|
"loss": 0.6487, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5.263157894736843e-07, |
|
"loss": 0.6644, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 7.894736842105263e-07, |
|
"loss": 0.6141, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.0526315789473685e-06, |
|
"loss": 0.6415, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.3157894736842106e-06, |
|
"loss": 0.6281, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.5789473684210526e-06, |
|
"loss": 0.6542, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 1.8421052631578948e-06, |
|
"loss": 0.6392, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.105263157894737e-06, |
|
"loss": 0.6253, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.368421052631579e-06, |
|
"loss": 0.6373, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.631578947368421e-06, |
|
"loss": 0.6655, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 2.8947368421052634e-06, |
|
"loss": 0.6521, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.157894736842105e-06, |
|
"loss": 0.6611, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.421052631578948e-06, |
|
"loss": 0.6569, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6842105263157896e-06, |
|
"loss": 0.6166, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.947368421052632e-06, |
|
"loss": 0.634, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.210526315789474e-06, |
|
"loss": 0.6268, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.473684210526316e-06, |
|
"loss": 0.633, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.736842105263158e-06, |
|
"loss": 0.5873, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5e-06, |
|
"loss": 0.5754, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.263157894736842e-06, |
|
"loss": 0.6279, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.526315789473685e-06, |
|
"loss": 0.6274, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 5.789473684210527e-06, |
|
"loss": 0.6202, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.0526315789473685e-06, |
|
"loss": 0.6231, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.31578947368421e-06, |
|
"loss": 0.6377, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.578947368421054e-06, |
|
"loss": 0.6046, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 6.842105263157896e-06, |
|
"loss": 0.5962, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.1052631578947375e-06, |
|
"loss": 0.6332, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.368421052631579e-06, |
|
"loss": 0.618, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.631578947368423e-06, |
|
"loss": 0.6363, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 7.894736842105265e-06, |
|
"loss": 0.6094, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 8.157894736842106e-06, |
|
"loss": 0.5766, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.421052631578948e-06, |
|
"loss": 0.5777, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.68421052631579e-06, |
|
"loss": 0.6267, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 8.947368421052632e-06, |
|
"loss": 0.6147, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.210526315789474e-06, |
|
"loss": 0.6288, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.473684210526315e-06, |
|
"loss": 0.6146, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.736842105263159e-06, |
|
"loss": 0.5781, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1e-05, |
|
"loss": 0.5902, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0263157894736844e-05, |
|
"loss": 0.6087, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0526315789473684e-05, |
|
"loss": 0.672, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.0789473684210528e-05, |
|
"loss": 0.616, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.105263157894737e-05, |
|
"loss": 0.5914, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.1315789473684212e-05, |
|
"loss": 0.5995, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.1578947368421053e-05, |
|
"loss": 0.619, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.1842105263157895e-05, |
|
"loss": 0.5858, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2105263157894737e-05, |
|
"loss": 0.663, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.236842105263158e-05, |
|
"loss": 0.6458, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.263157894736842e-05, |
|
"loss": 0.5941, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.2894736842105264e-05, |
|
"loss": 0.6095, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3157894736842108e-05, |
|
"loss": 0.6187, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3421052631578948e-05, |
|
"loss": 0.6084, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3684210526315791e-05, |
|
"loss": 0.6611, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.3947368421052631e-05, |
|
"loss": 0.5786, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4210526315789475e-05, |
|
"loss": 0.6292, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4473684210526317e-05, |
|
"loss": 0.6511, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.4736842105263159e-05, |
|
"loss": 0.6452, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 0.6517, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5263157894736846e-05, |
|
"loss": 0.6404, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.5526315789473686e-05, |
|
"loss": 0.6534, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.578947368421053e-05, |
|
"loss": 0.6639, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.605263157894737e-05, |
|
"loss": 0.5906, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6315789473684213e-05, |
|
"loss": 0.6151, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6578947368421053e-05, |
|
"loss": 0.6352, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.6842105263157896e-05, |
|
"loss": 0.6549, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7105263157894737e-05, |
|
"loss": 0.6084, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.736842105263158e-05, |
|
"loss": 0.6229, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.763157894736842e-05, |
|
"loss": 0.6251, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.7894736842105264e-05, |
|
"loss": 0.6091, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8157894736842107e-05, |
|
"loss": 0.5874, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.8421052631578947e-05, |
|
"loss": 0.609, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.868421052631579e-05, |
|
"loss": 0.5909, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.894736842105263e-05, |
|
"loss": 0.6391, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9210526315789474e-05, |
|
"loss": 0.6419, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9473684210526318e-05, |
|
"loss": 0.628, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9736842105263158e-05, |
|
"loss": 0.6238, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 2e-05, |
|
"loss": 0.686, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9991769547325103e-05, |
|
"loss": 0.662, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.998353909465021e-05, |
|
"loss": 0.6295, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.997530864197531e-05, |
|
"loss": 0.65, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9967078189300412e-05, |
|
"loss": 0.6087, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9958847736625514e-05, |
|
"loss": 0.7357, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.995061728395062e-05, |
|
"loss": 0.6881, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.994238683127572e-05, |
|
"loss": 0.6753, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9934156378600823e-05, |
|
"loss": 0.6538, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9925925925925928e-05, |
|
"loss": 0.6558, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.991769547325103e-05, |
|
"loss": 0.6554, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9909465020576132e-05, |
|
"loss": 0.6577, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9901234567901237e-05, |
|
"loss": 0.6316, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.989300411522634e-05, |
|
"loss": 0.6142, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.988477366255144e-05, |
|
"loss": 0.6853, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9876543209876546e-05, |
|
"loss": 0.6294, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9868312757201648e-05, |
|
"loss": 0.6505, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.986008230452675e-05, |
|
"loss": 0.6358, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9851851851851855e-05, |
|
"loss": 0.6874, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9843621399176956e-05, |
|
"loss": 0.6253, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.983539094650206e-05, |
|
"loss": 0.6699, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9827160493827163e-05, |
|
"loss": 0.6514, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9818930041152265e-05, |
|
"loss": 0.6626, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.981069958847737e-05, |
|
"loss": 0.631, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9802469135802472e-05, |
|
"loss": 0.6432, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9794238683127574e-05, |
|
"loss": 0.6137, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9786008230452676e-05, |
|
"loss": 0.625, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.977777777777778e-05, |
|
"loss": 0.6344, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9769547325102883e-05, |
|
"loss": 0.6638, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9761316872427985e-05, |
|
"loss": 0.6438, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9753086419753087e-05, |
|
"loss": 0.6541, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9744855967078192e-05, |
|
"loss": 0.6406, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9736625514403294e-05, |
|
"loss": 0.6225, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9728395061728395e-05, |
|
"loss": 0.6389, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.97201646090535e-05, |
|
"loss": 0.6679, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9711934156378602e-05, |
|
"loss": 0.6647, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9703703703703704e-05, |
|
"loss": 0.6491, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9695473251028806e-05, |
|
"loss": 0.6349, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.968724279835391e-05, |
|
"loss": 0.6354, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9679012345679013e-05, |
|
"loss": 0.6326, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9670781893004115e-05, |
|
"loss": 0.6437, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.966255144032922e-05, |
|
"loss": 0.6462, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9654320987654322e-05, |
|
"loss": 0.6322, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9646090534979424e-05, |
|
"loss": 0.6674, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.963786008230453e-05, |
|
"loss": 0.6936, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.962962962962963e-05, |
|
"loss": 0.638, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9621399176954733e-05, |
|
"loss": 0.6224, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9613168724279838e-05, |
|
"loss": 0.6202, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.960493827160494e-05, |
|
"loss": 0.6686, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.959670781893004e-05, |
|
"loss": 0.6311, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9588477366255147e-05, |
|
"loss": 0.604, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.958024691358025e-05, |
|
"loss": 0.6507, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9572016460905354e-05, |
|
"loss": 0.6805, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9563786008230455e-05, |
|
"loss": 0.67, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9555555555555557e-05, |
|
"loss": 0.6694, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9547325102880663e-05, |
|
"loss": 0.6603, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9539094650205764e-05, |
|
"loss": 0.6699, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9530864197530866e-05, |
|
"loss": 0.7279, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9522633744855968e-05, |
|
"loss": 0.6208, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9514403292181073e-05, |
|
"loss": 0.6499, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9506172839506175e-05, |
|
"loss": 0.6685, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9497942386831277e-05, |
|
"loss": 0.6557, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9489711934156382e-05, |
|
"loss": 0.6737, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9481481481481484e-05, |
|
"loss": 0.6111, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9473251028806586e-05, |
|
"loss": 0.6348, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9465020576131687e-05, |
|
"loss": 0.6718, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9456790123456793e-05, |
|
"loss": 0.6908, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9448559670781894e-05, |
|
"loss": 0.6281, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9440329218106996e-05, |
|
"loss": 0.6331, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.94320987654321e-05, |
|
"loss": 0.5998, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9423868312757203e-05, |
|
"loss": 0.6359, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9415637860082305e-05, |
|
"loss": 0.6755, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9407407407407407e-05, |
|
"loss": 0.6374, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9399176954732512e-05, |
|
"loss": 0.6638, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9390946502057614e-05, |
|
"loss": 0.6071, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9382716049382716e-05, |
|
"loss": 0.6809, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.937448559670782e-05, |
|
"loss": 0.6175, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9366255144032923e-05, |
|
"loss": 0.6628, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9358024691358025e-05, |
|
"loss": 0.6567, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.934979423868313e-05, |
|
"loss": 0.7288, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.934156378600823e-05, |
|
"loss": 0.6834, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9333333333333333e-05, |
|
"loss": 0.6118, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.932510288065844e-05, |
|
"loss": 0.616, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.931687242798354e-05, |
|
"loss": 0.6659, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9308641975308646e-05, |
|
"loss": 0.634, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9300411522633747e-05, |
|
"loss": 0.613, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.929218106995885e-05, |
|
"loss": 0.6069, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9283950617283955e-05, |
|
"loss": 0.7148, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9275720164609056e-05, |
|
"loss": 0.6548, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9267489711934158e-05, |
|
"loss": 0.6534, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.925925925925926e-05, |
|
"loss": 0.6598, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9251028806584365e-05, |
|
"loss": 0.6485, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9242798353909467e-05, |
|
"loss": 0.6614, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.923456790123457e-05, |
|
"loss": 0.5956, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9226337448559674e-05, |
|
"loss": 0.6292, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9218106995884776e-05, |
|
"loss": 0.6557, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9209876543209878e-05, |
|
"loss": 0.6378, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.920164609053498e-05, |
|
"loss": 0.6515, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9193415637860085e-05, |
|
"loss": 0.6401, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9185185185185186e-05, |
|
"loss": 0.6388, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9176954732510288e-05, |
|
"loss": 0.6597, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9168724279835393e-05, |
|
"loss": 0.6912, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9160493827160495e-05, |
|
"loss": 0.601, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9152263374485597e-05, |
|
"loss": 0.5757, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.91440329218107e-05, |
|
"loss": 0.6278, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9135802469135804e-05, |
|
"loss": 0.6985, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9127572016460906e-05, |
|
"loss": 0.7056, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9119341563786008e-05, |
|
"loss": 0.5969, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9111111111111113e-05, |
|
"loss": 0.624, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9102880658436215e-05, |
|
"loss": 0.7324, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9094650205761317e-05, |
|
"loss": 0.7273, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9086419753086422e-05, |
|
"loss": 0.6393, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9078189300411524e-05, |
|
"loss": 0.669, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9069958847736625e-05, |
|
"loss": 0.6635, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.906172839506173e-05, |
|
"loss": 0.6529, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9053497942386832e-05, |
|
"loss": 0.6548, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9045267489711938e-05, |
|
"loss": 0.6632, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.903703703703704e-05, |
|
"loss": 0.6497, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.902880658436214e-05, |
|
"loss": 0.6333, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9020576131687247e-05, |
|
"loss": 0.6195, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.901234567901235e-05, |
|
"loss": 0.6771, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.900411522633745e-05, |
|
"loss": 0.6737, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8995884773662555e-05, |
|
"loss": 0.6388, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8987654320987657e-05, |
|
"loss": 0.6086, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.897942386831276e-05, |
|
"loss": 0.6954, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.897119341563786e-05, |
|
"loss": 0.65, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8962962962962966e-05, |
|
"loss": 0.6544, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8954732510288068e-05, |
|
"loss": 0.6824, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.894650205761317e-05, |
|
"loss": 0.6549, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8938271604938275e-05, |
|
"loss": 0.7288, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.8930041152263377e-05, |
|
"loss": 0.6738, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.892181069958848e-05, |
|
"loss": 0.7009, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.891358024691358e-05, |
|
"loss": 0.6515, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8905349794238685e-05, |
|
"loss": 0.6545, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8897119341563787e-05, |
|
"loss": 0.6139, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 0.6256, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8880658436213994e-05, |
|
"loss": 0.6612, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8872427983539096e-05, |
|
"loss": 0.6823, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8864197530864198e-05, |
|
"loss": 0.701, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.88559670781893e-05, |
|
"loss": 0.6589, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8847736625514405e-05, |
|
"loss": 0.6998, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8839506172839507e-05, |
|
"loss": 0.6361, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.883127572016461e-05, |
|
"loss": 0.6372, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8823045267489714e-05, |
|
"loss": 0.6627, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8814814814814816e-05, |
|
"loss": 0.6597, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8806584362139917e-05, |
|
"loss": 0.6513, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8798353909465023e-05, |
|
"loss": 0.6184, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8790123456790124e-05, |
|
"loss": 0.6736, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.878189300411523e-05, |
|
"loss": 0.6227, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.877366255144033e-05, |
|
"loss": 0.7008, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8765432098765433e-05, |
|
"loss": 0.629, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.875720164609054e-05, |
|
"loss": 0.6751, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.874897119341564e-05, |
|
"loss": 0.6407, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8740740740740742e-05, |
|
"loss": 0.6166, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8732510288065847e-05, |
|
"loss": 0.6539, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.872427983539095e-05, |
|
"loss": 0.6517, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.871604938271605e-05, |
|
"loss": 0.6558, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8707818930041153e-05, |
|
"loss": 0.6422, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8699588477366258e-05, |
|
"loss": 0.6737, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.869135802469136e-05, |
|
"loss": 0.6326, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.868312757201646e-05, |
|
"loss": 0.7108, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8674897119341567e-05, |
|
"loss": 0.6744, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.866666666666667e-05, |
|
"loss": 0.6607, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.865843621399177e-05, |
|
"loss": 0.6304, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8650205761316872e-05, |
|
"loss": 0.6559, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8641975308641977e-05, |
|
"loss": 0.6008, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.863374485596708e-05, |
|
"loss": 0.6412, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.862551440329218e-05, |
|
"loss": 0.6215, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8617283950617286e-05, |
|
"loss": 0.6417, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8609053497942388e-05, |
|
"loss": 0.6501, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.860082304526749e-05, |
|
"loss": 0.6536, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8592592592592592e-05, |
|
"loss": 0.663, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8584362139917697e-05, |
|
"loss": 0.6357, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.85761316872428e-05, |
|
"loss": 0.602, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.85679012345679e-05, |
|
"loss": 0.6002, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8559670781893006e-05, |
|
"loss": 0.5942, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8551440329218108e-05, |
|
"loss": 0.6082, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8543209876543213e-05, |
|
"loss": 0.6408, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8534979423868315e-05, |
|
"loss": 0.6777, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8526748971193416e-05, |
|
"loss": 0.6613, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.851851851851852e-05, |
|
"loss": 0.6704, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8510288065843623e-05, |
|
"loss": 0.6037, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8502057613168725e-05, |
|
"loss": 0.6532, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.849382716049383e-05, |
|
"loss": 0.6123, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8485596707818932e-05, |
|
"loss": 0.6658, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8477366255144034e-05, |
|
"loss": 0.6753, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.846913580246914e-05, |
|
"loss": 0.6448, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.846090534979424e-05, |
|
"loss": 0.5887, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8452674897119343e-05, |
|
"loss": 0.6264, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8444444444444448e-05, |
|
"loss": 0.6656, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.843621399176955e-05, |
|
"loss": 0.6447, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8427983539094652e-05, |
|
"loss": 0.5978, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8419753086419754e-05, |
|
"loss": 0.6323, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.841152263374486e-05, |
|
"loss": 0.6535, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.840329218106996e-05, |
|
"loss": 0.6056, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8395061728395062e-05, |
|
"loss": 0.6339, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8386831275720164e-05, |
|
"loss": 0.6336, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.837860082304527e-05, |
|
"loss": 0.645, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.837037037037037e-05, |
|
"loss": 0.6561, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8362139917695473e-05, |
|
"loss": 0.6263, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.835390946502058e-05, |
|
"loss": 0.6537, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.834567901234568e-05, |
|
"loss": 0.5949, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8337448559670782e-05, |
|
"loss": 0.6378, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8329218106995884e-05, |
|
"loss": 0.658, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.832098765432099e-05, |
|
"loss": 0.6538, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.831275720164609e-05, |
|
"loss": 0.6454, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8304526748971193e-05, |
|
"loss": 0.6609, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8296296296296298e-05, |
|
"loss": 0.669, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.82880658436214e-05, |
|
"loss": 0.6615, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8279835390946505e-05, |
|
"loss": 0.6236, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8271604938271607e-05, |
|
"loss": 0.672, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.826337448559671e-05, |
|
"loss": 0.6304, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8255144032921814e-05, |
|
"loss": 0.6671, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8246913580246915e-05, |
|
"loss": 0.6538, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8238683127572017e-05, |
|
"loss": 0.6119, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8230452674897122e-05, |
|
"loss": 0.6066, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8222222222222224e-05, |
|
"loss": 0.6064, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8213991769547326e-05, |
|
"loss": 0.6018, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.820576131687243e-05, |
|
"loss": 0.6307, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8197530864197533e-05, |
|
"loss": 0.6473, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8189300411522635e-05, |
|
"loss": 0.6149, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.818106995884774e-05, |
|
"loss": 0.6399, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8172839506172842e-05, |
|
"loss": 0.6297, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8164609053497944e-05, |
|
"loss": 0.7015, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8156378600823046e-05, |
|
"loss": 0.6196, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.814814814814815e-05, |
|
"loss": 0.6463, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8139917695473253e-05, |
|
"loss": 0.6232, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8131687242798354e-05, |
|
"loss": 0.6374, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.812345679012346e-05, |
|
"loss": 0.6684, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.811522633744856e-05, |
|
"loss": 0.6609, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.8106995884773663e-05, |
|
"loss": 0.6217, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8098765432098765e-05, |
|
"loss": 0.67, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.809053497942387e-05, |
|
"loss": 0.63, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8082304526748972e-05, |
|
"loss": 0.5896, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8074074074074074e-05, |
|
"loss": 0.6012, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.806584362139918e-05, |
|
"loss": 0.632, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.805761316872428e-05, |
|
"loss": 0.6119, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8049382716049383e-05, |
|
"loss": 0.6451, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8041152263374485e-05, |
|
"loss": 0.5984, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.803292181069959e-05, |
|
"loss": 0.6617, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.802469135802469e-05, |
|
"loss": 0.6145, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8016460905349797e-05, |
|
"loss": 0.6188, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.80082304526749e-05, |
|
"loss": 0.5804, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.8e-05, |
|
"loss": 0.6225, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7991769547325106e-05, |
|
"loss": 0.6267, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7983539094650207e-05, |
|
"loss": 0.6459, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.797530864197531e-05, |
|
"loss": 0.6723, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7967078189300414e-05, |
|
"loss": 0.6508, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7958847736625516e-05, |
|
"loss": 0.6537, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7950617283950618e-05, |
|
"loss": 0.6519, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7942386831275723e-05, |
|
"loss": 0.6578, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7934156378600825e-05, |
|
"loss": 0.6379, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7925925925925927e-05, |
|
"loss": 0.6127, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7917695473251032e-05, |
|
"loss": 0.6139, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7909465020576134e-05, |
|
"loss": 0.6128, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7901234567901236e-05, |
|
"loss": 0.5898, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7893004115226338e-05, |
|
"loss": 0.6081, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7884773662551443e-05, |
|
"loss": 0.6359, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7876543209876545e-05, |
|
"loss": 0.6268, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7868312757201646e-05, |
|
"loss": 0.6058, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.786008230452675e-05, |
|
"loss": 0.5287, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7851851851851853e-05, |
|
"loss": 0.6083, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7843621399176955e-05, |
|
"loss": 0.6445, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7835390946502057e-05, |
|
"loss": 0.6333, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7827160493827162e-05, |
|
"loss": 0.6612, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7818930041152264e-05, |
|
"loss": 0.6143, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7810699588477366e-05, |
|
"loss": 0.5507, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.780246913580247e-05, |
|
"loss": 0.6121, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7794238683127573e-05, |
|
"loss": 0.6195, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7786008230452675e-05, |
|
"loss": 0.6057, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7777777777777777e-05, |
|
"loss": 0.6632, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7769547325102882e-05, |
|
"loss": 0.5683, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7761316872427984e-05, |
|
"loss": 0.5928, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.775308641975309e-05, |
|
"loss": 0.6613, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.774485596707819e-05, |
|
"loss": 0.6542, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7736625514403292e-05, |
|
"loss": 0.6588, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7728395061728398e-05, |
|
"loss": 0.5986, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.77201646090535e-05, |
|
"loss": 0.6309, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.77119341563786e-05, |
|
"loss": 0.6338, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7703703703703706e-05, |
|
"loss": 0.6144, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.769547325102881e-05, |
|
"loss": 0.6216, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7687242798353914e-05, |
|
"loss": 0.6183, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7679012345679015e-05, |
|
"loss": 0.6082, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7670781893004117e-05, |
|
"loss": 0.605, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.766255144032922e-05, |
|
"loss": 0.589, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7654320987654324e-05, |
|
"loss": 0.6644, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7646090534979426e-05, |
|
"loss": 0.6635, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7637860082304528e-05, |
|
"loss": 0.6392, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7629629629629633e-05, |
|
"loss": 0.5786, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7621399176954735e-05, |
|
"loss": 0.6126, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7613168724279837e-05, |
|
"loss": 0.5781, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.760493827160494e-05, |
|
"loss": 0.6108, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7596707818930044e-05, |
|
"loss": 0.5744, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.7588477366255145e-05, |
|
"loss": 0.6323, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7580246913580247e-05, |
|
"loss": 0.6288, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7572016460905352e-05, |
|
"loss": 0.6054, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7563786008230454e-05, |
|
"loss": 0.6725, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7555555555555556e-05, |
|
"loss": 0.6441, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7547325102880658e-05, |
|
"loss": 0.5586, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7539094650205763e-05, |
|
"loss": 0.6179, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7530864197530865e-05, |
|
"loss": 0.6255, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7522633744855967e-05, |
|
"loss": 0.6096, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7514403292181072e-05, |
|
"loss": 0.6303, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7506172839506174e-05, |
|
"loss": 0.5581, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7497942386831276e-05, |
|
"loss": 0.6254, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.748971193415638e-05, |
|
"loss": 0.5763, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.7481481481481483e-05, |
|
"loss": 0.6404, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7473251028806584e-05, |
|
"loss": 0.6309, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.746502057613169e-05, |
|
"loss": 0.5892, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.745679012345679e-05, |
|
"loss": 0.603, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7448559670781893e-05, |
|
"loss": 0.626, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7440329218107e-05, |
|
"loss": 0.6047, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.74320987654321e-05, |
|
"loss": 0.6067, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7423868312757206e-05, |
|
"loss": 0.6574, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7415637860082307e-05, |
|
"loss": 0.6354, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.740740740740741e-05, |
|
"loss": 0.6345, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.739917695473251e-05, |
|
"loss": 0.6425, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7390946502057616e-05, |
|
"loss": 0.6034, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.7382716049382718e-05, |
|
"loss": 0.5942, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.737448559670782e-05, |
|
"loss": 0.5661, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7366255144032925e-05, |
|
"loss": 0.6314, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7358024691358027e-05, |
|
"loss": 0.5899, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.734979423868313e-05, |
|
"loss": 0.6445, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.734156378600823e-05, |
|
"loss": 0.6052, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7333333333333336e-05, |
|
"loss": 0.5993, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7325102880658437e-05, |
|
"loss": 0.6319, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.731687242798354e-05, |
|
"loss": 0.6038, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7308641975308644e-05, |
|
"loss": 0.6356, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7300411522633746e-05, |
|
"loss": 0.6309, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7292181069958848e-05, |
|
"loss": 0.6172, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.728395061728395e-05, |
|
"loss": 0.6102, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.7275720164609055e-05, |
|
"loss": 0.606, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7267489711934157e-05, |
|
"loss": 0.5993, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.725925925925926e-05, |
|
"loss": 0.6291, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7251028806584364e-05, |
|
"loss": 0.6131, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7242798353909466e-05, |
|
"loss": 0.6307, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7234567901234568e-05, |
|
"loss": 0.6069, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7226337448559673e-05, |
|
"loss": 0.6129, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7218106995884775e-05, |
|
"loss": 0.5999, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7209876543209876e-05, |
|
"loss": 0.5973, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.720164609053498e-05, |
|
"loss": 0.6087, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7193415637860083e-05, |
|
"loss": 0.5843, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.7185185185185185e-05, |
|
"loss": 0.662, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.717695473251029e-05, |
|
"loss": 0.5923, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7168724279835392e-05, |
|
"loss": 0.6279, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7160493827160498e-05, |
|
"loss": 0.6212, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.71522633744856e-05, |
|
"loss": 0.6264, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.71440329218107e-05, |
|
"loss": 0.6419, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7135802469135806e-05, |
|
"loss": 0.5774, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7127572016460908e-05, |
|
"loss": 0.614, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.711934156378601e-05, |
|
"loss": 0.6287, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7111111111111112e-05, |
|
"loss": 0.6377, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7102880658436217e-05, |
|
"loss": 0.6523, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.709465020576132e-05, |
|
"loss": 0.6033, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.708641975308642e-05, |
|
"loss": 0.6365, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7078189300411526e-05, |
|
"loss": 0.6646, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.7069958847736628e-05, |
|
"loss": 0.645, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.706172839506173e-05, |
|
"loss": 0.6217, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.705349794238683e-05, |
|
"loss": 0.6383, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7045267489711936e-05, |
|
"loss": 0.5929, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7037037037037038e-05, |
|
"loss": 0.6395, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.702880658436214e-05, |
|
"loss": 0.579, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7020576131687242e-05, |
|
"loss": 0.6132, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.7012345679012347e-05, |
|
"loss": 0.6177, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.700411522633745e-05, |
|
"loss": 0.5766, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.699588477366255e-05, |
|
"loss": 0.6222, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.6987654320987656e-05, |
|
"loss": 0.6346, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.6979423868312758e-05, |
|
"loss": 0.5914, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.697119341563786e-05, |
|
"loss": 0.6204, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6962962962962965e-05, |
|
"loss": 0.6244, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6954732510288067e-05, |
|
"loss": 0.5821, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.694650205761317e-05, |
|
"loss": 0.5912, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6938271604938274e-05, |
|
"loss": 0.6165, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6930041152263375e-05, |
|
"loss": 0.5832, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.692181069958848e-05, |
|
"loss": 0.6354, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6913580246913582e-05, |
|
"loss": 0.6096, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6905349794238684e-05, |
|
"loss": 0.6405, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.689711934156379e-05, |
|
"loss": 0.5501, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.688888888888889e-05, |
|
"loss": 0.6346, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.6880658436213993e-05, |
|
"loss": 0.6051, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.68724279835391e-05, |
|
"loss": 0.5807, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.68641975308642e-05, |
|
"loss": 0.5896, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6855967078189302e-05, |
|
"loss": 0.5758, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6847736625514404e-05, |
|
"loss": 0.5819, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.683950617283951e-05, |
|
"loss": 0.6367, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.683127572016461e-05, |
|
"loss": 0.638, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6823045267489713e-05, |
|
"loss": 0.634, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6814814814814818e-05, |
|
"loss": 0.6342, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.680658436213992e-05, |
|
"loss": 0.6491, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.679835390946502e-05, |
|
"loss": 0.5746, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6790123456790123e-05, |
|
"loss": 0.5957, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.678189300411523e-05, |
|
"loss": 0.5942, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.677366255144033e-05, |
|
"loss": 0.5826, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.6765432098765432e-05, |
|
"loss": 0.616, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6757201646090537e-05, |
|
"loss": 0.6195, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.674897119341564e-05, |
|
"loss": 0.6843, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.674074074074074e-05, |
|
"loss": 0.5883, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6732510288065843e-05, |
|
"loss": 0.6445, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6724279835390948e-05, |
|
"loss": 0.5817, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.671604938271605e-05, |
|
"loss": 0.576, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.670781893004115e-05, |
|
"loss": 0.5945, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6699588477366257e-05, |
|
"loss": 0.6095, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.669135802469136e-05, |
|
"loss": 0.6554, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.668312757201646e-05, |
|
"loss": 0.6044, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6674897119341566e-05, |
|
"loss": 0.615, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 0.5805, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.6658436213991773e-05, |
|
"loss": 0.6138, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6650205761316874e-05, |
|
"loss": 0.5903, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6641975308641976e-05, |
|
"loss": 0.6034, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.663374485596708e-05, |
|
"loss": 0.6404, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6625514403292183e-05, |
|
"loss": 0.5957, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6617283950617285e-05, |
|
"loss": 0.6194, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.660905349794239e-05, |
|
"loss": 0.6032, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6600823045267492e-05, |
|
"loss": 0.5826, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6592592592592594e-05, |
|
"loss": 0.61, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6584362139917696e-05, |
|
"loss": 0.5975, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.65761316872428e-05, |
|
"loss": 0.5779, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6567901234567903e-05, |
|
"loss": 0.619, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.6559670781893005e-05, |
|
"loss": 0.5639, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.655144032921811e-05, |
|
"loss": 0.5855, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.654320987654321e-05, |
|
"loss": 0.6027, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6534979423868313e-05, |
|
"loss": 0.5665, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6526748971193415e-05, |
|
"loss": 0.598, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.651851851851852e-05, |
|
"loss": 0.626, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6510288065843622e-05, |
|
"loss": 0.5955, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6502057613168724e-05, |
|
"loss": 0.6545, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.649382716049383e-05, |
|
"loss": 0.5632, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.648559670781893e-05, |
|
"loss": 0.6338, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6477366255144033e-05, |
|
"loss": 0.6542, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6469135802469135e-05, |
|
"loss": 0.648, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.646090534979424e-05, |
|
"loss": 0.5791, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.6452674897119342e-05, |
|
"loss": 0.569, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6444444444444444e-05, |
|
"loss": 0.6353, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.643621399176955e-05, |
|
"loss": 0.6266, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.642798353909465e-05, |
|
"loss": 0.5858, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6419753086419752e-05, |
|
"loss": 0.5775, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6411522633744858e-05, |
|
"loss": 0.6436, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.640329218106996e-05, |
|
"loss": 0.5894, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6395061728395065e-05, |
|
"loss": 0.6015, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6386831275720166e-05, |
|
"loss": 0.59, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6378600823045268e-05, |
|
"loss": 0.6318, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6370370370370374e-05, |
|
"loss": 0.5522, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6362139917695475e-05, |
|
"loss": 0.6141, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.6353909465020577e-05, |
|
"loss": 0.6704, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6345679012345682e-05, |
|
"loss": 0.5917, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6337448559670784e-05, |
|
"loss": 0.6677, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6329218106995886e-05, |
|
"loss": 0.5501, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.632098765432099e-05, |
|
"loss": 0.5735, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6312757201646093e-05, |
|
"loss": 0.6038, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6304526748971195e-05, |
|
"loss": 0.5849, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6296296296296297e-05, |
|
"loss": 0.5568, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6288065843621402e-05, |
|
"loss": 0.6212, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6279835390946504e-05, |
|
"loss": 0.5629, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6271604938271605e-05, |
|
"loss": 0.5877, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.626337448559671e-05, |
|
"loss": 0.6045, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6255144032921812e-05, |
|
"loss": 0.5969, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.6246913580246914e-05, |
|
"loss": 0.6483, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6238683127572016e-05, |
|
"loss": 0.6925, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.623045267489712e-05, |
|
"loss": 0.5737, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6222222222222223e-05, |
|
"loss": 0.5804, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6213991769547325e-05, |
|
"loss": 0.6547, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.620576131687243e-05, |
|
"loss": 0.5347, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6197530864197532e-05, |
|
"loss": 0.545, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6189300411522634e-05, |
|
"loss": 0.5384, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6181069958847736e-05, |
|
"loss": 0.6022, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.617283950617284e-05, |
|
"loss": 0.5841, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6164609053497943e-05, |
|
"loss": 0.6015, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6156378600823044e-05, |
|
"loss": 0.6222, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.614814814814815e-05, |
|
"loss": 0.5661, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.613991769547325e-05, |
|
"loss": 0.5673, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6131687242798357e-05, |
|
"loss": 0.6028, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.612345679012346e-05, |
|
"loss": 0.5987, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.611522633744856e-05, |
|
"loss": 0.5724, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6106995884773666e-05, |
|
"loss": 0.6, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6098765432098767e-05, |
|
"loss": 0.5793, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.609053497942387e-05, |
|
"loss": 0.596, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6082304526748974e-05, |
|
"loss": 0.6053, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6074074074074076e-05, |
|
"loss": 0.6085, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6065843621399178e-05, |
|
"loss": 0.6197, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6057613168724283e-05, |
|
"loss": 0.5463, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6049382716049385e-05, |
|
"loss": 0.6217, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.6041152263374487e-05, |
|
"loss": 0.6048, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.603292181069959e-05, |
|
"loss": 0.5932, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6024691358024694e-05, |
|
"loss": 0.6031, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6016460905349796e-05, |
|
"loss": 0.5902, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6008230452674897e-05, |
|
"loss": 0.5749, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 0.6424, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5991769547325104e-05, |
|
"loss": 0.5602, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5983539094650206e-05, |
|
"loss": 0.5757, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5975308641975308e-05, |
|
"loss": 0.6082, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5967078189300413e-05, |
|
"loss": 0.648, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5958847736625515e-05, |
|
"loss": 0.6259, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5950617283950617e-05, |
|
"loss": 0.5971, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5942386831275722e-05, |
|
"loss": 0.5763, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.5934156378600824e-05, |
|
"loss": 0.6119, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5925925925925926e-05, |
|
"loss": 0.6069, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5917695473251028e-05, |
|
"loss": 0.5504, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5909465020576133e-05, |
|
"loss": 0.5809, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5901234567901235e-05, |
|
"loss": 0.5935, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5893004115226336e-05, |
|
"loss": 0.5749, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.588477366255144e-05, |
|
"loss": 0.5631, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5876543209876543e-05, |
|
"loss": 0.6188, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.586831275720165e-05, |
|
"loss": 0.5687, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.586008230452675e-05, |
|
"loss": 0.6583, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5851851851851852e-05, |
|
"loss": 0.6085, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.5843621399176957e-05, |
|
"loss": 0.5445, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.583539094650206e-05, |
|
"loss": 0.5987, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.582716049382716e-05, |
|
"loss": 0.6199, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5818930041152266e-05, |
|
"loss": 0.6168, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5810699588477368e-05, |
|
"loss": 0.562, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.580246913580247e-05, |
|
"loss": 0.6126, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5794238683127575e-05, |
|
"loss": 0.6027, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5786008230452677e-05, |
|
"loss": 0.5782, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.577777777777778e-05, |
|
"loss": 0.6589, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5769547325102884e-05, |
|
"loss": 0.584, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5761316872427986e-05, |
|
"loss": 0.6236, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5753086419753088e-05, |
|
"loss": 0.578, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.574485596707819e-05, |
|
"loss": 0.6228, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5736625514403295e-05, |
|
"loss": 0.596, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.5728395061728396e-05, |
|
"loss": 0.5976, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5720164609053498e-05, |
|
"loss": 0.5984, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5711934156378603e-05, |
|
"loss": 0.5889, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5703703703703705e-05, |
|
"loss": 0.6073, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5695473251028807e-05, |
|
"loss": 0.629, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.568724279835391e-05, |
|
"loss": 0.5446, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5679012345679014e-05, |
|
"loss": 0.6082, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5670781893004116e-05, |
|
"loss": 0.5761, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5662551440329218e-05, |
|
"loss": 0.5766, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.565432098765432e-05, |
|
"loss": 0.5862, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5646090534979425e-05, |
|
"loss": 0.5673, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5637860082304527e-05, |
|
"loss": 0.6084, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.5629629629629632e-05, |
|
"loss": 0.523, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5621399176954734e-05, |
|
"loss": 0.5843, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5613168724279835e-05, |
|
"loss": 0.5654, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.560493827160494e-05, |
|
"loss": 0.5862, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5596707818930042e-05, |
|
"loss": 0.5862, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5588477366255144e-05, |
|
"loss": 0.5759, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.558024691358025e-05, |
|
"loss": 0.6136, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.557201646090535e-05, |
|
"loss": 0.5949, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5563786008230453e-05, |
|
"loss": 0.6093, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.555555555555556e-05, |
|
"loss": 0.5808, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.554732510288066e-05, |
|
"loss": 0.5855, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5539094650205762e-05, |
|
"loss": 0.5835, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.5530864197530867e-05, |
|
"loss": 0.5555, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.552263374485597e-05, |
|
"loss": 0.5674, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.551440329218107e-05, |
|
"loss": 0.6055, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5506172839506176e-05, |
|
"loss": 0.585, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5497942386831278e-05, |
|
"loss": 0.5657, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.548971193415638e-05, |
|
"loss": 0.6211, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.548148148148148e-05, |
|
"loss": 0.6239, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5473251028806587e-05, |
|
"loss": 0.5787, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.546502057613169e-05, |
|
"loss": 0.6414, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.545679012345679e-05, |
|
"loss": 0.6435, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5448559670781895e-05, |
|
"loss": 0.5709, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.5440329218106997e-05, |
|
"loss": 0.5996, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.54320987654321e-05, |
|
"loss": 0.6423, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.54238683127572e-05, |
|
"loss": 0.5438, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5415637860082306e-05, |
|
"loss": 0.5748, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5407407407407408e-05, |
|
"loss": 0.6301, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.539917695473251e-05, |
|
"loss": 0.577, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5390946502057615e-05, |
|
"loss": 0.5845, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5382716049382717e-05, |
|
"loss": 0.6111, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.537448559670782e-05, |
|
"loss": 0.7054, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5366255144032924e-05, |
|
"loss": 0.5753, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5358024691358026e-05, |
|
"loss": 0.6248, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5349794238683127e-05, |
|
"loss": 0.5955, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5341563786008233e-05, |
|
"loss": 0.532, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5333333333333334e-05, |
|
"loss": 0.6073, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.5325102880658436e-05, |
|
"loss": 0.6162, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.531687242798354e-05, |
|
"loss": 0.5404, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5308641975308643e-05, |
|
"loss": 0.6226, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.530041152263375e-05, |
|
"loss": 0.5838, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.529218106995885e-05, |
|
"loss": 0.5742, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5283950617283952e-05, |
|
"loss": 0.5691, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5275720164609057e-05, |
|
"loss": 0.6132, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.526748971193416e-05, |
|
"loss": 0.5553, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.525925925925926e-05, |
|
"loss": 0.6106, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5251028806584363e-05, |
|
"loss": 0.5598, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5242798353909466e-05, |
|
"loss": 0.5847, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.523456790123457e-05, |
|
"loss": 0.5495, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5226337448559672e-05, |
|
"loss": 0.5955, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 1.5218106995884773e-05, |
|
"loss": 0.5789, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5209876543209879e-05, |
|
"loss": 0.5653, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.520164609053498e-05, |
|
"loss": 0.6177, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5193415637860082e-05, |
|
"loss": 0.5778, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5185185185185187e-05, |
|
"loss": 0.6116, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.517695473251029e-05, |
|
"loss": 0.5457, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5168724279835391e-05, |
|
"loss": 0.6262, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5160493827160495e-05, |
|
"loss": 0.5957, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5152263374485598e-05, |
|
"loss": 0.5743, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.51440329218107e-05, |
|
"loss": 0.5619, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5135802469135803e-05, |
|
"loss": 0.5688, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5127572016460907e-05, |
|
"loss": 0.5606, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.511934156378601e-05, |
|
"loss": 0.5687, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 1.5111111111111112e-05, |
|
"loss": 0.5531, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5102880658436214e-05, |
|
"loss": 0.6193, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.509465020576132e-05, |
|
"loss": 0.5998, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5086419753086421e-05, |
|
"loss": 0.5382, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5078189300411523e-05, |
|
"loss": 0.5623, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5069958847736628e-05, |
|
"loss": 0.5439, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.506172839506173e-05, |
|
"loss": 0.5638, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5053497942386832e-05, |
|
"loss": 0.6169, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5045267489711934e-05, |
|
"loss": 0.5803, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5037037037037039e-05, |
|
"loss": 0.6376, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.502880658436214e-05, |
|
"loss": 0.5903, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5020576131687244e-05, |
|
"loss": 0.5923, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 1.5012345679012348e-05, |
|
"loss": 0.6079, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.500411522633745e-05, |
|
"loss": 0.5773, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4995884773662553e-05, |
|
"loss": 0.5825, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4987654320987655e-05, |
|
"loss": 0.5935, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4979423868312758e-05, |
|
"loss": 0.641, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4971193415637862e-05, |
|
"loss": 0.615, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4962962962962964e-05, |
|
"loss": 0.7009, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4954732510288069e-05, |
|
"loss": 0.5626, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.494650205761317e-05, |
|
"loss": 0.6019, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4938271604938272e-05, |
|
"loss": 0.5909, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4930041152263374e-05, |
|
"loss": 0.5497, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.492181069958848e-05, |
|
"loss": 0.556, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4913580246913581e-05, |
|
"loss": 0.5475, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 1.4905349794238683e-05, |
|
"loss": 0.5869, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4897119341563788e-05, |
|
"loss": 0.5369, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.488888888888889e-05, |
|
"loss": 0.6049, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4880658436213992e-05, |
|
"loss": 0.5323, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4872427983539095e-05, |
|
"loss": 0.5991, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4864197530864199e-05, |
|
"loss": 0.558, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4855967078189302e-05, |
|
"loss": 0.5932, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4847736625514404e-05, |
|
"loss": 0.5857, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4839506172839508e-05, |
|
"loss": 0.5639, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4831275720164611e-05, |
|
"loss": 0.5657, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4823045267489713e-05, |
|
"loss": 0.5354, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.4814814814814815e-05, |
|
"loss": 0.6059, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.480658436213992e-05, |
|
"loss": 0.5289, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4798353909465022e-05, |
|
"loss": 0.5742, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4790123456790124e-05, |
|
"loss": 0.5648, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4781893004115229e-05, |
|
"loss": 0.6036, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.477366255144033e-05, |
|
"loss": 0.5734, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4765432098765433e-05, |
|
"loss": 0.5385, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4757201646090536e-05, |
|
"loss": 0.5672, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.474897119341564e-05, |
|
"loss": 0.6105, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4740740740740741e-05, |
|
"loss": 0.5796, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4732510288065845e-05, |
|
"loss": 0.5526, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4724279835390947e-05, |
|
"loss": 0.5916, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.471604938271605e-05, |
|
"loss": 0.632, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4707818930041154e-05, |
|
"loss": 0.6016, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.4699588477366256e-05, |
|
"loss": 0.5889, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.469135802469136e-05, |
|
"loss": 0.5636, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4683127572016463e-05, |
|
"loss": 0.5625, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4674897119341564e-05, |
|
"loss": 0.5556, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4666666666666666e-05, |
|
"loss": 0.5558, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4658436213991771e-05, |
|
"loss": 0.5932, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4650205761316873e-05, |
|
"loss": 0.6073, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4641975308641975e-05, |
|
"loss": 0.5328, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.463374485596708e-05, |
|
"loss": 0.5402, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4625514403292182e-05, |
|
"loss": 0.5665, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4617283950617284e-05, |
|
"loss": 0.5847, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4609053497942387e-05, |
|
"loss": 0.5505, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4600823045267491e-05, |
|
"loss": 0.6071, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.4592592592592594e-05, |
|
"loss": 0.5707, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4584362139917696e-05, |
|
"loss": 0.5394, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.45761316872428e-05, |
|
"loss": 0.5738, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4567901234567903e-05, |
|
"loss": 0.5812, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4559670781893005e-05, |
|
"loss": 0.5796, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4551440329218107e-05, |
|
"loss": 0.549, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4543209876543212e-05, |
|
"loss": 0.5823, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4534979423868314e-05, |
|
"loss": 0.5927, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4526748971193416e-05, |
|
"loss": 0.5396, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4518518518518521e-05, |
|
"loss": 0.5451, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4510288065843623e-05, |
|
"loss": 0.5531, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4502057613168725e-05, |
|
"loss": 0.6051, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.4493827160493828e-05, |
|
"loss": 0.5756, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4485596707818932e-05, |
|
"loss": 0.5872, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4477366255144033e-05, |
|
"loss": 0.542, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4469135802469137e-05, |
|
"loss": 0.5696, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.446090534979424e-05, |
|
"loss": 0.5424, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4452674897119342e-05, |
|
"loss": 0.5662, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4444444444444446e-05, |
|
"loss": 0.551, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4436213991769548e-05, |
|
"loss": 0.6475, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4427983539094653e-05, |
|
"loss": 0.6183, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4419753086419755e-05, |
|
"loss": 0.558, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4411522633744856e-05, |
|
"loss": 0.5884, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4403292181069962e-05, |
|
"loss": 0.5911, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4395061728395063e-05, |
|
"loss": 0.5816, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.4386831275720165e-05, |
|
"loss": 0.5613, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4378600823045267e-05, |
|
"loss": 0.5654, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4370370370370372e-05, |
|
"loss": 0.58, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4362139917695474e-05, |
|
"loss": 0.5373, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4353909465020578e-05, |
|
"loss": 0.611, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4345679012345681e-05, |
|
"loss": 0.5067, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4337448559670783e-05, |
|
"loss": 0.5594, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4329218106995886e-05, |
|
"loss": 0.5922, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4320987654320988e-05, |
|
"loss": 0.5606, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4312757201646092e-05, |
|
"loss": 0.5485, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4304526748971195e-05, |
|
"loss": 0.4932, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4296296296296297e-05, |
|
"loss": 0.5634, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.4288065843621399e-05, |
|
"loss": 0.5314, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4279835390946504e-05, |
|
"loss": 0.543, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4271604938271606e-05, |
|
"loss": 0.5625, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4263374485596708e-05, |
|
"loss": 0.5022, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4255144032921813e-05, |
|
"loss": 0.5623, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4246913580246915e-05, |
|
"loss": 0.5766, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4238683127572017e-05, |
|
"loss": 0.608, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.423045267489712e-05, |
|
"loss": 0.5322, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4222222222222224e-05, |
|
"loss": 0.5835, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4213991769547325e-05, |
|
"loss": 0.5109, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4205761316872429e-05, |
|
"loss": 0.5938, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4197530864197532e-05, |
|
"loss": 0.546, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4189300411522636e-05, |
|
"loss": 0.5348, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.4181069958847738e-05, |
|
"loss": 0.559, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.417283950617284e-05, |
|
"loss": 0.5268, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4164609053497945e-05, |
|
"loss": 0.6154, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4156378600823047e-05, |
|
"loss": 0.5661, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4148148148148148e-05, |
|
"loss": 0.5869, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4139917695473254e-05, |
|
"loss": 0.5425, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4131687242798355e-05, |
|
"loss": 0.6204, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4123456790123457e-05, |
|
"loss": 0.588, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4115226337448559e-05, |
|
"loss": 0.54, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4106995884773664e-05, |
|
"loss": 0.5868, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4098765432098766e-05, |
|
"loss": 0.5485, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.409053497942387e-05, |
|
"loss": 0.5508, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4082304526748973e-05, |
|
"loss": 0.5631, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4074074074074075e-05, |
|
"loss": 0.5275, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4065843621399178e-05, |
|
"loss": 0.4996, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.405761316872428e-05, |
|
"loss": 0.5785, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4049382716049384e-05, |
|
"loss": 0.5726, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4041152263374487e-05, |
|
"loss": 0.5505, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4032921810699589e-05, |
|
"loss": 0.5351, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4024691358024694e-05, |
|
"loss": 0.5527, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4016460905349796e-05, |
|
"loss": 0.6152, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4008230452674898e-05, |
|
"loss": 0.5502, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4e-05, |
|
"loss": 0.6209, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.3991769547325105e-05, |
|
"loss": 0.5697, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.3983539094650207e-05, |
|
"loss": 0.6031, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.3975308641975309e-05, |
|
"loss": 0.5595, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3967078189300414e-05, |
|
"loss": 0.5436, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3958847736625516e-05, |
|
"loss": 0.5315, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3950617283950617e-05, |
|
"loss": 0.5461, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3942386831275721e-05, |
|
"loss": 0.5997, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3934156378600824e-05, |
|
"loss": 0.5753, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3925925925925928e-05, |
|
"loss": 0.6, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.391769547325103e-05, |
|
"loss": 0.5839, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3909465020576133e-05, |
|
"loss": 0.5523, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3901234567901237e-05, |
|
"loss": 0.5073, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3893004115226339e-05, |
|
"loss": 0.5411, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.388477366255144e-05, |
|
"loss": 0.5323, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3876543209876546e-05, |
|
"loss": 0.56, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3868312757201647e-05, |
|
"loss": 0.5745, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.386008230452675e-05, |
|
"loss": 0.564, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3851851851851851e-05, |
|
"loss": 0.5238, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3843621399176956e-05, |
|
"loss": 0.5504, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3835390946502058e-05, |
|
"loss": 0.5233, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3827160493827162e-05, |
|
"loss": 0.5974, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3818930041152265e-05, |
|
"loss": 0.5541, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3810699588477367e-05, |
|
"loss": 0.5876, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.380246913580247e-05, |
|
"loss": 0.5584, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3794238683127572e-05, |
|
"loss": 0.5608, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3786008230452676e-05, |
|
"loss": 0.574, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.377777777777778e-05, |
|
"loss": 0.5196, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3769547325102881e-05, |
|
"loss": 0.6365, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3761316872427986e-05, |
|
"loss": 0.5549, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3753086419753088e-05, |
|
"loss": 0.5856, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.374485596707819e-05, |
|
"loss": 0.5147, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3736625514403292e-05, |
|
"loss": 0.6021, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3728395061728397e-05, |
|
"loss": 0.5549, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3720164609053499e-05, |
|
"loss": 0.5438, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.37119341563786e-05, |
|
"loss": 0.5106, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3703703703703706e-05, |
|
"loss": 0.5727, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3695473251028808e-05, |
|
"loss": 0.5395, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.368724279835391e-05, |
|
"loss": 0.5673, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3679012345679013e-05, |
|
"loss": 0.5574, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.3670781893004116e-05, |
|
"loss": 0.5215, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.366255144032922e-05, |
|
"loss": 0.5804, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3654320987654322e-05, |
|
"loss": 0.566, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3646090534979425e-05, |
|
"loss": 0.5701, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3637860082304529e-05, |
|
"loss": 0.5373, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.362962962962963e-05, |
|
"loss": 0.5228, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3621399176954732e-05, |
|
"loss": 0.5635, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3613168724279838e-05, |
|
"loss": 0.5801, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.360493827160494e-05, |
|
"loss": 0.5581, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3596707818930041e-05, |
|
"loss": 0.5835, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3588477366255147e-05, |
|
"loss": 0.5787, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3580246913580248e-05, |
|
"loss": 0.5476, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.357201646090535e-05, |
|
"loss": 0.5779, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.3563786008230454e-05, |
|
"loss": 0.5575, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3555555555555557e-05, |
|
"loss": 0.5214, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3547325102880659e-05, |
|
"loss": 0.563, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3539094650205762e-05, |
|
"loss": 0.6253, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3530864197530866e-05, |
|
"loss": 0.5631, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3522633744855968e-05, |
|
"loss": 0.6144, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3514403292181071e-05, |
|
"loss": 0.6165, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3506172839506173e-05, |
|
"loss": 0.5399, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3497942386831278e-05, |
|
"loss": 0.5727, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.348971193415638e-05, |
|
"loss": 0.5252, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3481481481481482e-05, |
|
"loss": 0.5758, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3473251028806587e-05, |
|
"loss": 0.6109, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.3465020576131689e-05, |
|
"loss": 0.5885, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.345679012345679e-05, |
|
"loss": 0.562, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3448559670781893e-05, |
|
"loss": 0.5528, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3440329218106998e-05, |
|
"loss": 0.5217, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.34320987654321e-05, |
|
"loss": 0.5424, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3423868312757201e-05, |
|
"loss": 0.538, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3415637860082307e-05, |
|
"loss": 0.5188, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3407407407407408e-05, |
|
"loss": 0.6183, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3399176954732512e-05, |
|
"loss": 0.5144, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3390946502057614e-05, |
|
"loss": 0.5709, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3382716049382717e-05, |
|
"loss": 0.5428, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.337448559670782e-05, |
|
"loss": 0.5923, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3366255144032923e-05, |
|
"loss": 0.5583, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.3358024691358024e-05, |
|
"loss": 0.5325, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.334979423868313e-05, |
|
"loss": 0.5393, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3341563786008231e-05, |
|
"loss": 0.565, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 0.5414, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3325102880658438e-05, |
|
"loss": 0.5507, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.331687242798354e-05, |
|
"loss": 0.5515, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3308641975308642e-05, |
|
"loss": 0.5435, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3300411522633746e-05, |
|
"loss": 0.4983, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3292181069958849e-05, |
|
"loss": 0.5572, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3283950617283951e-05, |
|
"loss": 0.5072, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3275720164609054e-05, |
|
"loss": 0.5471, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.3267489711934158e-05, |
|
"loss": 0.5981, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.325925925925926e-05, |
|
"loss": 0.6511, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3251028806584363e-05, |
|
"loss": 0.5418, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3242798353909465e-05, |
|
"loss": 0.5556, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.323456790123457e-05, |
|
"loss": 0.5495, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3226337448559672e-05, |
|
"loss": 0.5227, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3218106995884774e-05, |
|
"loss": 0.5318, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.320987654320988e-05, |
|
"loss": 0.5132, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3201646090534981e-05, |
|
"loss": 0.5707, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3193415637860083e-05, |
|
"loss": 0.5785, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3185185185185185e-05, |
|
"loss": 0.5222, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.317695473251029e-05, |
|
"loss": 0.5794, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3168724279835392e-05, |
|
"loss": 0.518, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3160493827160493e-05, |
|
"loss": 0.5476, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3152263374485599e-05, |
|
"loss": 0.5577, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.31440329218107e-05, |
|
"loss": 0.5607, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3135802469135804e-05, |
|
"loss": 0.5307, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3127572016460906e-05, |
|
"loss": 0.5105, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.311934156378601e-05, |
|
"loss": 0.5454, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3111111111111113e-05, |
|
"loss": 0.5369, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3102880658436215e-05, |
|
"loss": 0.5066, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3094650205761318e-05, |
|
"loss": 0.541, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3086419753086422e-05, |
|
"loss": 0.5774, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3078189300411523e-05, |
|
"loss": 0.5904, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3069958847736625e-05, |
|
"loss": 0.5244, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.306172839506173e-05, |
|
"loss": 0.5106, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3053497942386832e-05, |
|
"loss": 0.5299, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3045267489711934e-05, |
|
"loss": 0.5239, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.303703703703704e-05, |
|
"loss": 0.5468, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3028806584362141e-05, |
|
"loss": 0.5865, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3020576131687243e-05, |
|
"loss": 0.5563, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3012345679012346e-05, |
|
"loss": 0.6072, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.300411522633745e-05, |
|
"loss": 0.5662, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2995884773662552e-05, |
|
"loss": 0.5635, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2987654320987655e-05, |
|
"loss": 0.5604, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2979423868312759e-05, |
|
"loss": 0.5058, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2971193415637862e-05, |
|
"loss": 0.5042, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2962962962962964e-05, |
|
"loss": 0.5613, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2954732510288066e-05, |
|
"loss": 0.5305, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2946502057613171e-05, |
|
"loss": 0.5394, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2938271604938273e-05, |
|
"loss": 0.5814, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2930041152263375e-05, |
|
"loss": 0.5131, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2921810699588477e-05, |
|
"loss": 0.5241, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2913580246913582e-05, |
|
"loss": 0.5235, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2905349794238684e-05, |
|
"loss": 0.5118, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2897119341563787e-05, |
|
"loss": 0.5367, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.288888888888889e-05, |
|
"loss": 0.5856, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2880658436213992e-05, |
|
"loss": 0.5466, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2872427983539096e-05, |
|
"loss": 0.5816, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2864197530864198e-05, |
|
"loss": 0.583, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2855967078189301e-05, |
|
"loss": 0.5305, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2847736625514405e-05, |
|
"loss": 0.5382, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2839506172839507e-05, |
|
"loss": 0.5664, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.283127572016461e-05, |
|
"loss": 0.5923, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2823045267489714e-05, |
|
"loss": 0.5363, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2814814814814815e-05, |
|
"loss": 0.6103, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2806584362139917e-05, |
|
"loss": 0.5257, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2798353909465022e-05, |
|
"loss": 0.5366, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2790123456790124e-05, |
|
"loss": 0.5086, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2781893004115226e-05, |
|
"loss": 0.5616, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2773662551440331e-05, |
|
"loss": 0.5561, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2765432098765433e-05, |
|
"loss": 0.5222, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2757201646090535e-05, |
|
"loss": 0.5518, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2748971193415638e-05, |
|
"loss": 0.535, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2740740740740742e-05, |
|
"loss": 0.5305, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2732510288065845e-05, |
|
"loss": 0.5709, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2724279835390947e-05, |
|
"loss": 0.5262, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.271604938271605e-05, |
|
"loss": 0.5436, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2707818930041154e-05, |
|
"loss": 0.5176, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2699588477366256e-05, |
|
"loss": 0.4787, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2691358024691358e-05, |
|
"loss": 0.5263, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2683127572016463e-05, |
|
"loss": 0.5376, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2674897119341565e-05, |
|
"loss": 0.527, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2666666666666667e-05, |
|
"loss": 0.5277, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2658436213991772e-05, |
|
"loss": 0.5614, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2650205761316874e-05, |
|
"loss": 0.5321, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2641975308641976e-05, |
|
"loss": 0.5392, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.2633744855967079e-05, |
|
"loss": 0.5672, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2625514403292183e-05, |
|
"loss": 0.5775, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2617283950617284e-05, |
|
"loss": 0.5114, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2609053497942388e-05, |
|
"loss": 0.5067, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2600823045267491e-05, |
|
"loss": 0.5355, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2592592592592593e-05, |
|
"loss": 0.5516, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2584362139917697e-05, |
|
"loss": 0.5554, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2576131687242799e-05, |
|
"loss": 0.5103, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2567901234567904e-05, |
|
"loss": 0.5419, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2559670781893006e-05, |
|
"loss": 0.5428, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2551440329218107e-05, |
|
"loss": 0.5481, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2543209876543213e-05, |
|
"loss": 0.584, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.2534979423868314e-05, |
|
"loss": 0.5121, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2526748971193416e-05, |
|
"loss": 0.5165, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2518518518518518e-05, |
|
"loss": 0.5255, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2510288065843623e-05, |
|
"loss": 0.5154, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2502057613168725e-05, |
|
"loss": 0.5967, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2493827160493827e-05, |
|
"loss": 0.5582, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.248559670781893e-05, |
|
"loss": 0.5656, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2477366255144034e-05, |
|
"loss": 0.5534, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2469135802469137e-05, |
|
"loss": 0.5246, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.246090534979424e-05, |
|
"loss": 0.545, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2452674897119343e-05, |
|
"loss": 0.5125, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2444444444444446e-05, |
|
"loss": 0.6149, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.2436213991769548e-05, |
|
"loss": 0.537, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.242798353909465e-05, |
|
"loss": 0.5537, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2419753086419755e-05, |
|
"loss": 0.5295, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2411522633744857e-05, |
|
"loss": 0.5184, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2403292181069959e-05, |
|
"loss": 0.554, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2395061728395064e-05, |
|
"loss": 0.5095, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2386831275720166e-05, |
|
"loss": 0.5396, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2378600823045268e-05, |
|
"loss": 0.4929, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2370370370370371e-05, |
|
"loss": 0.542, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2362139917695475e-05, |
|
"loss": 0.5637, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2353909465020576e-05, |
|
"loss": 0.588, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.234567901234568e-05, |
|
"loss": 0.537, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2337448559670783e-05, |
|
"loss": 0.5304, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.2329218106995885e-05, |
|
"loss": 0.553, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2320987654320989e-05, |
|
"loss": 0.5283, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.231275720164609e-05, |
|
"loss": 0.5774, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2304526748971196e-05, |
|
"loss": 0.4815, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2296296296296298e-05, |
|
"loss": 0.5064, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.22880658436214e-05, |
|
"loss": 0.5605, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2279835390946505e-05, |
|
"loss": 0.5459, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2271604938271606e-05, |
|
"loss": 0.5687, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2263374485596708e-05, |
|
"loss": 0.5535, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.225514403292181e-05, |
|
"loss": 0.5241, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2246913580246915e-05, |
|
"loss": 0.5543, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2238683127572017e-05, |
|
"loss": 0.5427, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2230452674897119e-05, |
|
"loss": 0.4904, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.2222222222222224e-05, |
|
"loss": 0.5383, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2213991769547326e-05, |
|
"loss": 0.5762, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.220576131687243e-05, |
|
"loss": 0.5734, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2197530864197531e-05, |
|
"loss": 0.5125, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2189300411522635e-05, |
|
"loss": 0.5662, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2181069958847738e-05, |
|
"loss": 0.5318, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.217283950617284e-05, |
|
"loss": 0.5823, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2164609053497944e-05, |
|
"loss": 0.5218, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2156378600823047e-05, |
|
"loss": 0.5488, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2148148148148149e-05, |
|
"loss": 0.5567, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.213991769547325e-05, |
|
"loss": 0.5335, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2131687242798356e-05, |
|
"loss": 0.5162, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.2123456790123458e-05, |
|
"loss": 0.5105, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.211522633744856e-05, |
|
"loss": 0.5231, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2106995884773665e-05, |
|
"loss": 0.5246, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2098765432098767e-05, |
|
"loss": 0.526, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2090534979423868e-05, |
|
"loss": 0.5429, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2082304526748972e-05, |
|
"loss": 0.4968, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2074074074074075e-05, |
|
"loss": 0.5022, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2065843621399177e-05, |
|
"loss": 0.4795, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.205761316872428e-05, |
|
"loss": 0.5076, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2049382716049384e-05, |
|
"loss": 0.5456, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2041152263374488e-05, |
|
"loss": 0.5714, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.203292181069959e-05, |
|
"loss": 0.5429, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2024691358024691e-05, |
|
"loss": 0.5212, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.2016460905349797e-05, |
|
"loss": 0.5386, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2008230452674898e-05, |
|
"loss": 0.5516, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2e-05, |
|
"loss": 0.5593, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1991769547325102e-05, |
|
"loss": 0.5362, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1983539094650207e-05, |
|
"loss": 0.5232, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1975308641975309e-05, |
|
"loss": 0.5048, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1967078189300411e-05, |
|
"loss": 0.5344, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1958847736625516e-05, |
|
"loss": 0.4771, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1950617283950618e-05, |
|
"loss": 0.5103, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1942386831275721e-05, |
|
"loss": 0.54, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1934156378600823e-05, |
|
"loss": 0.5716, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.1925925925925927e-05, |
|
"loss": 0.5293, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.191769547325103e-05, |
|
"loss": 0.5157, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1909465020576132e-05, |
|
"loss": 0.4781, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1901234567901236e-05, |
|
"loss": 0.524, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.189300411522634e-05, |
|
"loss": 0.4891, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1884773662551441e-05, |
|
"loss": 0.4941, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1876543209876543e-05, |
|
"loss": 0.5032, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1868312757201648e-05, |
|
"loss": 0.5092, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.186008230452675e-05, |
|
"loss": 0.5531, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1851851851851852e-05, |
|
"loss": 0.5453, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1843621399176957e-05, |
|
"loss": 0.475, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1835390946502059e-05, |
|
"loss": 0.533, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.182716049382716e-05, |
|
"loss": 0.5161, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1818930041152264e-05, |
|
"loss": 0.5217, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1810699588477367e-05, |
|
"loss": 0.5246, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.180246913580247e-05, |
|
"loss": 0.549, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1794238683127573e-05, |
|
"loss": 0.5247, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1786008230452676e-05, |
|
"loss": 0.4753, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.177777777777778e-05, |
|
"loss": 0.5278, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1769547325102882e-05, |
|
"loss": 0.5313, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1761316872427983e-05, |
|
"loss": 0.5325, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1753086419753089e-05, |
|
"loss": 0.5315, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.174485596707819e-05, |
|
"loss": 0.498, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1736625514403292e-05, |
|
"loss": 0.4958, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1728395061728398e-05, |
|
"loss": 0.5529, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.17201646090535e-05, |
|
"loss": 0.5157, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.1711934156378601e-05, |
|
"loss": 0.5425, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1703703703703703e-05, |
|
"loss": 0.5123, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1695473251028808e-05, |
|
"loss": 0.5409, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.168724279835391e-05, |
|
"loss": 0.5476, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1679012345679013e-05, |
|
"loss": 0.5187, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1670781893004117e-05, |
|
"loss": 0.4925, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1662551440329219e-05, |
|
"loss": 0.5379, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1654320987654322e-05, |
|
"loss": 0.5277, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1646090534979424e-05, |
|
"loss": 0.5101, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1637860082304528e-05, |
|
"loss": 0.5112, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1629629629629631e-05, |
|
"loss": 0.5484, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1621399176954733e-05, |
|
"loss": 0.5753, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.1613168724279838e-05, |
|
"loss": 0.5299, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 1.160493827160494e-05, |
|
"loss": 0.494, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1596707818930042e-05, |
|
"loss": 0.4865, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1588477366255144e-05, |
|
"loss": 0.5303, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1580246913580249e-05, |
|
"loss": 0.4744, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.157201646090535e-05, |
|
"loss": 0.5507, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1563786008230452e-05, |
|
"loss": 0.5021, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1555555555555556e-05, |
|
"loss": 0.5513, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.154732510288066e-05, |
|
"loss": 0.5133, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1539094650205761e-05, |
|
"loss": 0.5431, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1530864197530865e-05, |
|
"loss": 0.5259, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1522633744855968e-05, |
|
"loss": 0.539, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1514403292181072e-05, |
|
"loss": 0.493, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 1.1506172839506174e-05, |
|
"loss": 0.5295, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1497942386831275e-05, |
|
"loss": 0.5026, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.148971193415638e-05, |
|
"loss": 0.5472, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1481481481481482e-05, |
|
"loss": 0.5196, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1473251028806584e-05, |
|
"loss": 0.4716, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.146502057613169e-05, |
|
"loss": 0.5255, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1456790123456791e-05, |
|
"loss": 0.5915, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1448559670781893e-05, |
|
"loss": 0.5526, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1440329218106997e-05, |
|
"loss": 0.5044, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.14320987654321e-05, |
|
"loss": 0.4965, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1423868312757202e-05, |
|
"loss": 0.5098, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1415637860082305e-05, |
|
"loss": 0.516, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.1407407407407409e-05, |
|
"loss": 0.5183, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 1.139917695473251e-05, |
|
"loss": 0.4831, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1390946502057614e-05, |
|
"loss": 0.5953, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1382716049382716e-05, |
|
"loss": 0.5344, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.137448559670782e-05, |
|
"loss": 0.5231, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1366255144032923e-05, |
|
"loss": 0.5234, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1358024691358025e-05, |
|
"loss": 0.5165, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.134979423868313e-05, |
|
"loss": 0.5477, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1341563786008232e-05, |
|
"loss": 0.5189, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1333333333333334e-05, |
|
"loss": 0.5166, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1325102880658436e-05, |
|
"loss": 0.5519, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.131687242798354e-05, |
|
"loss": 0.5458, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1308641975308643e-05, |
|
"loss": 0.5448, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 1.1300411522633744e-05, |
|
"loss": 0.5349, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.129218106995885e-05, |
|
"loss": 0.4986, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1283950617283951e-05, |
|
"loss": 0.557, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1275720164609055e-05, |
|
"loss": 0.4802, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1267489711934157e-05, |
|
"loss": 0.4882, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.125925925925926e-05, |
|
"loss": 0.5489, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1251028806584364e-05, |
|
"loss": 0.5104, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1242798353909466e-05, |
|
"loss": 0.5004, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.123456790123457e-05, |
|
"loss": 0.5383, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1226337448559673e-05, |
|
"loss": 0.5246, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1218106995884774e-05, |
|
"loss": 0.5334, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1209876543209876e-05, |
|
"loss": 0.5431, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1201646090534982e-05, |
|
"loss": 0.4977, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 1.1193415637860083e-05, |
|
"loss": 0.5373, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1185185185185185e-05, |
|
"loss": 0.5645, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.117695473251029e-05, |
|
"loss": 0.5578, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1168724279835392e-05, |
|
"loss": 0.5313, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1160493827160494e-05, |
|
"loss": 0.5278, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1152263374485597e-05, |
|
"loss": 0.5202, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1144032921810701e-05, |
|
"loss": 0.5397, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1135802469135803e-05, |
|
"loss": 0.5067, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1127572016460906e-05, |
|
"loss": 0.5589, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1119341563786008e-05, |
|
"loss": 0.5103, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1111111111111113e-05, |
|
"loss": 0.5159, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1102880658436215e-05, |
|
"loss": 0.5444, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1094650205761317e-05, |
|
"loss": 0.5595, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 1.1086419753086422e-05, |
|
"loss": 0.515, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1078189300411524e-05, |
|
"loss": 0.5088, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1069958847736626e-05, |
|
"loss": 0.5397, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1061728395061728e-05, |
|
"loss": 0.4955, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1053497942386833e-05, |
|
"loss": 0.5262, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1045267489711935e-05, |
|
"loss": 0.555, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1037037037037036e-05, |
|
"loss": 0.5261, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1028806584362142e-05, |
|
"loss": 0.5182, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1020576131687243e-05, |
|
"loss": 0.5064, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1012345679012347e-05, |
|
"loss": 0.6149, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.1004115226337449e-05, |
|
"loss": 0.5296, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0995884773662552e-05, |
|
"loss": 0.5306, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 1.0987654320987656e-05, |
|
"loss": 0.5532, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0979423868312758e-05, |
|
"loss": 0.5017, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0971193415637861e-05, |
|
"loss": 0.5209, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0962962962962965e-05, |
|
"loss": 0.4912, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0954732510288066e-05, |
|
"loss": 0.5386, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0946502057613168e-05, |
|
"loss": 0.4777, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0938271604938273e-05, |
|
"loss": 0.5449, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0930041152263375e-05, |
|
"loss": 0.5153, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0921810699588477e-05, |
|
"loss": 0.5251, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0913580246913582e-05, |
|
"loss": 0.4846, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0905349794238684e-05, |
|
"loss": 0.4793, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0897119341563786e-05, |
|
"loss": 0.4974, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.088888888888889e-05, |
|
"loss": 0.5094, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.0880658436213993e-05, |
|
"loss": 0.5157, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0872427983539095e-05, |
|
"loss": 0.493, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0864197530864198e-05, |
|
"loss": 0.5304, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0855967078189302e-05, |
|
"loss": 0.514, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0847736625514405e-05, |
|
"loss": 0.5025, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0839506172839507e-05, |
|
"loss": 0.5248, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0831275720164609e-05, |
|
"loss": 0.5373, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0823045267489714e-05, |
|
"loss": 0.5149, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0814814814814816e-05, |
|
"loss": 0.5614, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0806584362139918e-05, |
|
"loss": 0.5571, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0798353909465023e-05, |
|
"loss": 0.512, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0790123456790125e-05, |
|
"loss": 0.5479, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.0781893004115227e-05, |
|
"loss": 0.5233, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0773662551440328e-05, |
|
"loss": 0.5727, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0765432098765434e-05, |
|
"loss": 0.5786, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0757201646090535e-05, |
|
"loss": 0.519, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0748971193415639e-05, |
|
"loss": 0.4767, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0740740740740742e-05, |
|
"loss": 0.5268, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0732510288065844e-05, |
|
"loss": 0.5637, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0724279835390948e-05, |
|
"loss": 0.4883, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.071604938271605e-05, |
|
"loss": 0.5147, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0707818930041153e-05, |
|
"loss": 0.5135, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0699588477366257e-05, |
|
"loss": 0.5313, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0691358024691358e-05, |
|
"loss": 0.4877, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0683127572016464e-05, |
|
"loss": 0.513, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0674897119341565e-05, |
|
"loss": 0.4865, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0666666666666667e-05, |
|
"loss": 0.5286, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0658436213991769e-05, |
|
"loss": 0.5058, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0650205761316874e-05, |
|
"loss": 0.4998, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0641975308641976e-05, |
|
"loss": 0.5163, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0633744855967078e-05, |
|
"loss": 0.5149, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0625514403292181e-05, |
|
"loss": 0.5005, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0617283950617285e-05, |
|
"loss": 0.4753, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0609053497942387e-05, |
|
"loss": 0.5009, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.060082304526749e-05, |
|
"loss": 0.5569, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0592592592592594e-05, |
|
"loss": 0.4954, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0584362139917697e-05, |
|
"loss": 0.4952, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.0576131687242799e-05, |
|
"loss": 0.4762, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0567901234567901e-05, |
|
"loss": 0.5076, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0559670781893006e-05, |
|
"loss": 0.486, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0551440329218108e-05, |
|
"loss": 0.502, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.054320987654321e-05, |
|
"loss": 0.5802, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0534979423868315e-05, |
|
"loss": 0.5708, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0526748971193417e-05, |
|
"loss": 0.5051, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0518518518518519e-05, |
|
"loss": 0.506, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.051028806584362e-05, |
|
"loss": 0.5356, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0502057613168726e-05, |
|
"loss": 0.5325, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0493827160493827e-05, |
|
"loss": 0.5199, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0485596707818931e-05, |
|
"loss": 0.5346, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0477366255144034e-05, |
|
"loss": 0.5627, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0469135802469136e-05, |
|
"loss": 0.5338, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.046090534979424e-05, |
|
"loss": 0.5085, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0452674897119342e-05, |
|
"loss": 0.5182, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0444444444444445e-05, |
|
"loss": 0.5225, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0436213991769549e-05, |
|
"loss": 0.5226, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.042798353909465e-05, |
|
"loss": 0.4991, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0419753086419756e-05, |
|
"loss": 0.5049, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0411522633744857e-05, |
|
"loss": 0.5078, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.040329218106996e-05, |
|
"loss": 0.6137, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0395061728395061e-05, |
|
"loss": 0.5137, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0386831275720166e-05, |
|
"loss": 0.5034, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0378600823045268e-05, |
|
"loss": 0.5338, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.037037037037037e-05, |
|
"loss": 0.5104, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0362139917695475e-05, |
|
"loss": 0.5049, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0353909465020577e-05, |
|
"loss": 0.4552, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0345679012345679e-05, |
|
"loss": 0.5168, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0337448559670782e-05, |
|
"loss": 0.4915, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0329218106995886e-05, |
|
"loss": 0.504, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.032098765432099e-05, |
|
"loss": 0.4946, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0312757201646091e-05, |
|
"loss": 0.5057, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0304526748971195e-05, |
|
"loss": 0.5022, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0296296296296298e-05, |
|
"loss": 0.5264, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.02880658436214e-05, |
|
"loss": 0.4802, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0279835390946502e-05, |
|
"loss": 0.4995, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0271604938271607e-05, |
|
"loss": 0.4696, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0263374485596709e-05, |
|
"loss": 0.4868, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.025514403292181e-05, |
|
"loss": 0.474, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0246913580246916e-05, |
|
"loss": 0.4845, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0238683127572018e-05, |
|
"loss": 0.5053, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.023045267489712e-05, |
|
"loss": 0.4865, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0222222222222223e-05, |
|
"loss": 0.4582, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0213991769547326e-05, |
|
"loss": 0.4529, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0205761316872428e-05, |
|
"loss": 0.4882, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0197530864197532e-05, |
|
"loss": 0.4321, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0189300411522634e-05, |
|
"loss": 0.4572, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0181069958847737e-05, |
|
"loss": 0.4207, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.017283950617284e-05, |
|
"loss": 0.4671, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 1.0164609053497942e-05, |
|
"loss": 0.4195, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0156378600823048e-05, |
|
"loss": 0.4259, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.014814814814815e-05, |
|
"loss": 0.4434, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0139917695473251e-05, |
|
"loss": 0.4646, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0131687242798353e-05, |
|
"loss": 0.4251, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0123456790123458e-05, |
|
"loss": 0.4445, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.011522633744856e-05, |
|
"loss": 0.4697, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0106995884773662e-05, |
|
"loss": 0.3973, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0098765432098767e-05, |
|
"loss": 0.4281, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0090534979423869e-05, |
|
"loss": 0.4355, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.008230452674897e-05, |
|
"loss": 0.4301, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0074074074074074e-05, |
|
"loss": 0.417, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0065843621399178e-05, |
|
"loss": 0.4208, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 1.0057613168724281e-05, |
|
"loss": 0.4019, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0049382716049383e-05, |
|
"loss": 0.3739, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0041152263374487e-05, |
|
"loss": 0.3848, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.003292181069959e-05, |
|
"loss": 0.3868, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0024691358024692e-05, |
|
"loss": 0.4228, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0016460905349794e-05, |
|
"loss": 0.3993, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1.0008230452674899e-05, |
|
"loss": 0.3704, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 1e-05, |
|
"loss": 0.3627, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.991769547325104e-06, |
|
"loss": 0.3864, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.983539094650206e-06, |
|
"loss": 0.4302, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.97530864197531e-06, |
|
"loss": 0.4156, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.967078189300411e-06, |
|
"loss": 0.3256, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 9.958847736625515e-06, |
|
"loss": 0.3605, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.950617283950618e-06, |
|
"loss": 0.3694, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.94238683127572e-06, |
|
"loss": 0.3475, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.934156378600824e-06, |
|
"loss": 0.3735, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.925925925925927e-06, |
|
"loss": 0.397, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.91769547325103e-06, |
|
"loss": 0.3492, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.909465020576133e-06, |
|
"loss": 0.3531, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.901234567901236e-06, |
|
"loss": 0.3487, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.893004115226338e-06, |
|
"loss": 0.3534, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.884773662551441e-06, |
|
"loss": 0.3829, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.876543209876543e-06, |
|
"loss": 0.3128, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.868312757201647e-06, |
|
"loss": 0.3337, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.86008230452675e-06, |
|
"loss": 0.3631, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.851851851851852e-06, |
|
"loss": 0.3663, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.843621399176956e-06, |
|
"loss": 0.3512, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.835390946502057e-06, |
|
"loss": 0.3322, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.827160493827161e-06, |
|
"loss": 0.3696, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.818930041152264e-06, |
|
"loss": 0.3548, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.810699588477366e-06, |
|
"loss": 0.299, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.80246913580247e-06, |
|
"loss": 0.3024, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.794238683127573e-06, |
|
"loss": 0.3428, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.786008230452677e-06, |
|
"loss": 0.3382, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 0.3013, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.769547325102882e-06, |
|
"loss": 0.2947, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.761316872427984e-06, |
|
"loss": 0.3174, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 9.753086419753087e-06, |
|
"loss": 0.2942, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.744855967078191e-06, |
|
"loss": 0.2798, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.736625514403293e-06, |
|
"loss": 0.3103, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.728395061728396e-06, |
|
"loss": 0.2699, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.720164609053498e-06, |
|
"loss": 0.3081, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.711934156378602e-06, |
|
"loss": 0.2962, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.703703703703703e-06, |
|
"loss": 0.3059, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.695473251028807e-06, |
|
"loss": 0.3101, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.68724279835391e-06, |
|
"loss": 0.3246, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.679012345679012e-06, |
|
"loss": 0.3194, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.670781893004116e-06, |
|
"loss": 0.2728, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.66255144032922e-06, |
|
"loss": 0.2989, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.654320987654323e-06, |
|
"loss": 0.2716, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 9.646090534979425e-06, |
|
"loss": 0.3631, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.637860082304528e-06, |
|
"loss": 0.3426, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.62962962962963e-06, |
|
"loss": 0.3272, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.621399176954733e-06, |
|
"loss": 0.2949, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.613168724279837e-06, |
|
"loss": 0.3826, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.604938271604939e-06, |
|
"loss": 0.3356, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.596707818930042e-06, |
|
"loss": 0.3237, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.588477366255144e-06, |
|
"loss": 0.3324, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.580246913580248e-06, |
|
"loss": 0.2777, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.57201646090535e-06, |
|
"loss": 0.3447, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.563786008230453e-06, |
|
"loss": 0.3217, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 0.2932, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 9.547325102880658e-06, |
|
"loss": 0.2861, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.539094650205762e-06, |
|
"loss": 0.326, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.530864197530865e-06, |
|
"loss": 0.2734, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.522633744855969e-06, |
|
"loss": 0.3054, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.51440329218107e-06, |
|
"loss": 0.2998, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.506172839506174e-06, |
|
"loss": 0.3123, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.497942386831278e-06, |
|
"loss": 0.2947, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.48971193415638e-06, |
|
"loss": 0.3058, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.481481481481483e-06, |
|
"loss": 0.2916, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.473251028806585e-06, |
|
"loss": 0.2898, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.465020576131688e-06, |
|
"loss": 0.2776, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.45679012345679e-06, |
|
"loss": 0.3094, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.448559670781894e-06, |
|
"loss": 0.2882, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.440329218106997e-06, |
|
"loss": 0.2911, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.432098765432099e-06, |
|
"loss": 0.2657, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.423868312757202e-06, |
|
"loss": 0.2731, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.415637860082304e-06, |
|
"loss": 0.2729, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.407407407407408e-06, |
|
"loss": 0.3034, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.399176954732511e-06, |
|
"loss": 0.2899, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.390946502057615e-06, |
|
"loss": 0.2782, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.382716049382717e-06, |
|
"loss": 0.2776, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.37448559670782e-06, |
|
"loss": 0.2737, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.366255144032924e-06, |
|
"loss": 0.2793, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.358024691358025e-06, |
|
"loss": 0.2729, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.349794238683129e-06, |
|
"loss": 0.3065, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.34156378600823e-06, |
|
"loss": 0.2739, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 0.2853, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.325102880658436e-06, |
|
"loss": 0.3306, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.31687242798354e-06, |
|
"loss": 0.286, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.308641975308643e-06, |
|
"loss": 0.2735, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.300411522633745e-06, |
|
"loss": 0.2606, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.292181069958848e-06, |
|
"loss": 0.2836, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.28395061728395e-06, |
|
"loss": 0.2723, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.275720164609054e-06, |
|
"loss": 0.2472, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.267489711934157e-06, |
|
"loss": 0.3006, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.25925925925926e-06, |
|
"loss": 0.3048, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.251028806584363e-06, |
|
"loss": 0.2849, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.242798353909466e-06, |
|
"loss": 0.2946, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 9.23456790123457e-06, |
|
"loss": 0.2828, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.226337448559671e-06, |
|
"loss": 0.2904, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.218106995884775e-06, |
|
"loss": 0.324, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.209876543209877e-06, |
|
"loss": 0.2671, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.20164609053498e-06, |
|
"loss": 0.273, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.193415637860082e-06, |
|
"loss": 0.2793, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.185185185185186e-06, |
|
"loss": 0.2938, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.17695473251029e-06, |
|
"loss": 0.3201, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.168724279835391e-06, |
|
"loss": 0.2743, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.160493827160494e-06, |
|
"loss": 0.2627, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.152263374485596e-06, |
|
"loss": 0.3088, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.1440329218107e-06, |
|
"loss": 0.3134, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.135802469135803e-06, |
|
"loss": 0.2767, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 9.127572016460907e-06, |
|
"loss": 0.2863, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.119341563786009e-06, |
|
"loss": 0.2322, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 0.2918, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.102880658436216e-06, |
|
"loss": 0.2813, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.094650205761317e-06, |
|
"loss": 0.2957, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.086419753086421e-06, |
|
"loss": 0.3009, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.078189300411523e-06, |
|
"loss": 0.2558, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.069958847736626e-06, |
|
"loss": 0.2943, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.06172839506173e-06, |
|
"loss": 0.3802, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.053497942386832e-06, |
|
"loss": 0.2981, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.045267489711935e-06, |
|
"loss": 0.2869, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.037037037037037e-06, |
|
"loss": 0.3341, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 9.02880658436214e-06, |
|
"loss": 0.2696, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.020576131687242e-06, |
|
"loss": 0.2485, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.012345679012346e-06, |
|
"loss": 0.2582, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.00411522633745e-06, |
|
"loss": 0.259, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.995884773662553e-06, |
|
"loss": 0.2742, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.987654320987655e-06, |
|
"loss": 0.2547, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.979423868312758e-06, |
|
"loss": 0.248, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.971193415637862e-06, |
|
"loss": 0.3047, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.962962962962963e-06, |
|
"loss": 0.2786, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.954732510288067e-06, |
|
"loss": 0.2941, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.946502057613169e-06, |
|
"loss": 0.2915, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.938271604938272e-06, |
|
"loss": 0.2721, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.930041152263376e-06, |
|
"loss": 0.2718, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.921810699588478e-06, |
|
"loss": 0.2706, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.913580246913581e-06, |
|
"loss": 0.2611, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.905349794238683e-06, |
|
"loss": 0.2819, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.897119341563786e-06, |
|
"loss": 0.2635, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.888888888888888e-06, |
|
"loss": 0.2758, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.880658436213992e-06, |
|
"loss": 0.2855, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.872427983539095e-06, |
|
"loss": 0.2823, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.864197530864199e-06, |
|
"loss": 0.2946, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.8559670781893e-06, |
|
"loss": 0.3063, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.847736625514404e-06, |
|
"loss": 0.2328, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.839506172839508e-06, |
|
"loss": 0.2344, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.83127572016461e-06, |
|
"loss": 0.2858, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 8.823045267489713e-06, |
|
"loss": 0.3126, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.814814814814817e-06, |
|
"loss": 0.2863, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.806584362139918e-06, |
|
"loss": 0.2392, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.798353909465022e-06, |
|
"loss": 0.2569, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.790123456790124e-06, |
|
"loss": 0.3039, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.781893004115227e-06, |
|
"loss": 0.3257, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.773662551440329e-06, |
|
"loss": 0.2793, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.765432098765432e-06, |
|
"loss": 0.262, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.757201646090536e-06, |
|
"loss": 0.2747, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.748971193415638e-06, |
|
"loss": 0.2523, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.740740740740741e-06, |
|
"loss": 0.2514, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.732510288065845e-06, |
|
"loss": 0.2906, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.724279835390947e-06, |
|
"loss": 0.2631, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.71604938271605e-06, |
|
"loss": 0.2677, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.707818930041154e-06, |
|
"loss": 0.2718, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.699588477366255e-06, |
|
"loss": 0.2903, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.691358024691359e-06, |
|
"loss": 0.3198, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.683127572016463e-06, |
|
"loss": 0.2662, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.674897119341564e-06, |
|
"loss": 0.282, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 0.2757, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.65843621399177e-06, |
|
"loss": 0.2874, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.650205761316873e-06, |
|
"loss": 0.2843, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.641975308641975e-06, |
|
"loss": 0.2909, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.633744855967078e-06, |
|
"loss": 0.2664, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.625514403292182e-06, |
|
"loss": 0.3829, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 8.617283950617284e-06, |
|
"loss": 0.3079, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.609053497942387e-06, |
|
"loss": 0.3027, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.60082304526749e-06, |
|
"loss": 0.2687, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.592592592592593e-06, |
|
"loss": 0.2745, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.584362139917696e-06, |
|
"loss": 0.251, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.5761316872428e-06, |
|
"loss": 0.251, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.567901234567903e-06, |
|
"loss": 0.2884, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.559670781893005e-06, |
|
"loss": 0.2972, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.551440329218108e-06, |
|
"loss": 0.316, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.54320987654321e-06, |
|
"loss": 0.2755, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.534979423868314e-06, |
|
"loss": 0.3401, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.526748971193416e-06, |
|
"loss": 0.2628, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.518518518518519e-06, |
|
"loss": 0.2902, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.510288065843621e-06, |
|
"loss": 0.2714, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.502057613168724e-06, |
|
"loss": 0.2779, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.493827160493828e-06, |
|
"loss": 0.2788, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.48559670781893e-06, |
|
"loss": 0.26, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.477366255144033e-06, |
|
"loss": 0.2766, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.469135802469137e-06, |
|
"loss": 0.23, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.46090534979424e-06, |
|
"loss": 0.3091, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.452674897119342e-06, |
|
"loss": 0.2483, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 0.2824, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.43621399176955e-06, |
|
"loss": 0.2732, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.427983539094651e-06, |
|
"loss": 0.2535, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.419753086419754e-06, |
|
"loss": 0.2917, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 8.411522633744856e-06, |
|
"loss": 0.2779, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.40329218106996e-06, |
|
"loss": 0.2714, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.395061728395062e-06, |
|
"loss": 0.2835, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.386831275720165e-06, |
|
"loss": 0.2868, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.378600823045269e-06, |
|
"loss": 0.277, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.37037037037037e-06, |
|
"loss": 0.3242, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.362139917695474e-06, |
|
"loss": 0.2791, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.353909465020576e-06, |
|
"loss": 0.2954, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.34567901234568e-06, |
|
"loss": 0.3437, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.337448559670783e-06, |
|
"loss": 0.3047, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.329218106995886e-06, |
|
"loss": 0.2606, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.320987654320988e-06, |
|
"loss": 0.2764, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.312757201646092e-06, |
|
"loss": 0.3562, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 8.304526748971195e-06, |
|
"loss": 0.2678, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.296296296296297e-06, |
|
"loss": 0.2884, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.2880658436214e-06, |
|
"loss": 0.2849, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.279835390946502e-06, |
|
"loss": 0.2832, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.271604938271606e-06, |
|
"loss": 0.2761, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.263374485596708e-06, |
|
"loss": 0.2366, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.255144032921811e-06, |
|
"loss": 0.2619, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.246913580246915e-06, |
|
"loss": 0.2568, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.238683127572016e-06, |
|
"loss": 0.2505, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.23045267489712e-06, |
|
"loss": 0.272, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.222222222222222e-06, |
|
"loss": 0.3069, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.213991769547325e-06, |
|
"loss": 0.2972, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.205761316872429e-06, |
|
"loss": 0.3149, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.197530864197532e-06, |
|
"loss": 0.271, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.189300411522634e-06, |
|
"loss": 0.2989, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.181069958847738e-06, |
|
"loss": 0.2547, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.172839506172841e-06, |
|
"loss": 0.2726, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.164609053497943e-06, |
|
"loss": 0.3303, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.156378600823046e-06, |
|
"loss": 0.3004, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.148148148148148e-06, |
|
"loss": 0.259, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.139917695473252e-06, |
|
"loss": 0.2723, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.131687242798355e-06, |
|
"loss": 0.2705, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.123456790123457e-06, |
|
"loss": 0.3152, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.11522633744856e-06, |
|
"loss": 0.2565, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.106995884773662e-06, |
|
"loss": 0.3491, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 8.098765432098766e-06, |
|
"loss": 0.274, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.090534979423868e-06, |
|
"loss": 0.2731, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.082304526748971e-06, |
|
"loss": 0.2679, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.074074074074075e-06, |
|
"loss": 0.2645, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.065843621399178e-06, |
|
"loss": 0.4237, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.05761316872428e-06, |
|
"loss": 0.2785, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.049382716049384e-06, |
|
"loss": 0.2532, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.041152263374487e-06, |
|
"loss": 0.2598, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.032921810699589e-06, |
|
"loss": 0.2415, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.024691358024692e-06, |
|
"loss": 0.2627, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.016460905349794e-06, |
|
"loss": 0.2991, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.008230452674898e-06, |
|
"loss": 0.2986, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 0.267, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.991769547325103e-06, |
|
"loss": 0.2931, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.983539094650207e-06, |
|
"loss": 0.2944, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.975308641975308e-06, |
|
"loss": 0.451, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.967078189300412e-06, |
|
"loss": 0.2745, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.958847736625514e-06, |
|
"loss": 0.301, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.950617283950617e-06, |
|
"loss": 0.2736, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.94238683127572e-06, |
|
"loss": 0.3158, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.934156378600824e-06, |
|
"loss": 0.3097, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.925925925925926e-06, |
|
"loss": 0.2732, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.91769547325103e-06, |
|
"loss": 0.2548, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.909465020576133e-06, |
|
"loss": 0.2691, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.901234567901235e-06, |
|
"loss": 0.2645, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 7.893004115226338e-06, |
|
"loss": 0.3265, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.884773662551442e-06, |
|
"loss": 0.3058, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.876543209876544e-06, |
|
"loss": 0.2806, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.868312757201647e-06, |
|
"loss": 0.2949, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.860082304526749e-06, |
|
"loss": 0.3447, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.851851851851853e-06, |
|
"loss": 0.4751, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.843621399176954e-06, |
|
"loss": 0.3162, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.835390946502058e-06, |
|
"loss": 0.2954, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.82716049382716e-06, |
|
"loss": 0.2666, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.818930041152263e-06, |
|
"loss": 0.409, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.810699588477367e-06, |
|
"loss": 0.3183, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.80246913580247e-06, |
|
"loss": 0.2919, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 7.794238683127572e-06, |
|
"loss": 0.2778, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.786008230452676e-06, |
|
"loss": 0.2945, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.77777777777778e-06, |
|
"loss": 0.2922, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.769547325102881e-06, |
|
"loss": 0.2354, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.761316872427984e-06, |
|
"loss": 0.2466, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.753086419753088e-06, |
|
"loss": 0.3021, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.74485596707819e-06, |
|
"loss": 0.3685, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.736625514403293e-06, |
|
"loss": 0.2941, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.728395061728395e-06, |
|
"loss": 0.2417, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.720164609053499e-06, |
|
"loss": 0.2903, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.7119341563786e-06, |
|
"loss": 0.2588, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.703703703703704e-06, |
|
"loss": 0.2874, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.695473251028807e-06, |
|
"loss": 0.2368, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 7.68724279835391e-06, |
|
"loss": 0.2746, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.679012345679013e-06, |
|
"loss": 0.275, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.670781893004116e-06, |
|
"loss": 0.3024, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.662551440329218e-06, |
|
"loss": 0.4758, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.654320987654322e-06, |
|
"loss": 0.2927, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.646090534979425e-06, |
|
"loss": 0.2678, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.637860082304529e-06, |
|
"loss": 0.2603, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.62962962962963e-06, |
|
"loss": 0.3039, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.621399176954733e-06, |
|
"loss": 0.2833, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.613168724279836e-06, |
|
"loss": 0.2665, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.604938271604939e-06, |
|
"loss": 0.2614, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.596707818930041e-06, |
|
"loss": 0.2517, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.588477366255145e-06, |
|
"loss": 0.2541, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 7.580246913580247e-06, |
|
"loss": 0.3003, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.57201646090535e-06, |
|
"loss": 0.2794, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.5637860082304535e-06, |
|
"loss": 0.2739, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 0.28, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.54732510288066e-06, |
|
"loss": 0.2259, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.5390946502057615e-06, |
|
"loss": 0.2573, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.530864197530865e-06, |
|
"loss": 0.344, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.522633744855967e-06, |
|
"loss": 0.2811, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.51440329218107e-06, |
|
"loss": 0.2761, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.506172839506174e-06, |
|
"loss": 0.2735, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.4979423868312765e-06, |
|
"loss": 0.229, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.489711934156379e-06, |
|
"loss": 0.2578, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 7.481481481481482e-06, |
|
"loss": 0.2816, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.473251028806585e-06, |
|
"loss": 0.2523, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.465020576131687e-06, |
|
"loss": 0.2721, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.456790123456791e-06, |
|
"loss": 0.2323, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.448559670781894e-06, |
|
"loss": 0.2423, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.440329218106996e-06, |
|
"loss": 0.3098, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.4320987654320995e-06, |
|
"loss": 0.36, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.423868312757202e-06, |
|
"loss": 0.2811, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.415637860082306e-06, |
|
"loss": 0.2509, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 0.2693, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.399176954732511e-06, |
|
"loss": 0.2706, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.3909465020576145e-06, |
|
"loss": 0.259, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.382716049382716e-06, |
|
"loss": 0.2704, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 7.37448559670782e-06, |
|
"loss": 0.2847, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.3662551440329225e-06, |
|
"loss": 0.2656, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.358024691358025e-06, |
|
"loss": 0.2641, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.349794238683128e-06, |
|
"loss": 0.3288, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.341563786008231e-06, |
|
"loss": 0.3277, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.333333333333333e-06, |
|
"loss": 0.3049, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.325102880658437e-06, |
|
"loss": 0.3834, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.31687242798354e-06, |
|
"loss": 0.2356, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.308641975308642e-06, |
|
"loss": 0.274, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.3004115226337455e-06, |
|
"loss": 0.286, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.292181069958848e-06, |
|
"loss": 0.3289, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.283950617283952e-06, |
|
"loss": 0.2502, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 7.2757201646090535e-06, |
|
"loss": 0.3189, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.267489711934157e-06, |
|
"loss": 0.2945, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.2592592592592605e-06, |
|
"loss": 0.3027, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.251028806584362e-06, |
|
"loss": 0.3287, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.242798353909466e-06, |
|
"loss": 0.4754, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.2345679012345685e-06, |
|
"loss": 0.2683, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.226337448559671e-06, |
|
"loss": 0.265, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.218106995884774e-06, |
|
"loss": 0.2786, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.209876543209877e-06, |
|
"loss": 0.3648, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.201646090534981e-06, |
|
"loss": 0.2753, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.193415637860083e-06, |
|
"loss": 0.2185, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.185185185185186e-06, |
|
"loss": 0.2811, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.176954732510289e-06, |
|
"loss": 0.216, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 7.1687242798353915e-06, |
|
"loss": 0.2594, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.160493827160494e-06, |
|
"loss": 0.2797, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.152263374485598e-06, |
|
"loss": 0.2505, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.1440329218106995e-06, |
|
"loss": 0.2536, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.135802469135803e-06, |
|
"loss": 0.2855, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.1275720164609065e-06, |
|
"loss": 0.266, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.119341563786008e-06, |
|
"loss": 0.2501, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 0.3099, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.1028806584362145e-06, |
|
"loss": 0.2851, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.094650205761318e-06, |
|
"loss": 0.2726, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.08641975308642e-06, |
|
"loss": 0.2673, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.078189300411523e-06, |
|
"loss": 0.2446, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 7.069958847736627e-06, |
|
"loss": 0.2295, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.061728395061729e-06, |
|
"loss": 0.3588, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.053497942386832e-06, |
|
"loss": 0.2556, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.045267489711935e-06, |
|
"loss": 0.2297, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.0370370370370375e-06, |
|
"loss": 0.2432, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.02880658436214e-06, |
|
"loss": 0.2508, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.020576131687244e-06, |
|
"loss": 0.2527, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.012345679012347e-06, |
|
"loss": 0.2569, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 7.004115226337449e-06, |
|
"loss": 0.2654, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.9958847736625525e-06, |
|
"loss": 0.2859, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.987654320987654e-06, |
|
"loss": 0.2399, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.979423868312758e-06, |
|
"loss": 0.2492, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.9711934156378605e-06, |
|
"loss": 0.245, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 6.962962962962964e-06, |
|
"loss": 0.2688, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.954732510288067e-06, |
|
"loss": 0.2423, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.946502057613169e-06, |
|
"loss": 0.2782, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.938271604938273e-06, |
|
"loss": 0.2659, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.930041152263375e-06, |
|
"loss": 0.3015, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.921810699588478e-06, |
|
"loss": 0.2669, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.913580246913581e-06, |
|
"loss": 0.2551, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.9053497942386835e-06, |
|
"loss": 0.2698, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.897119341563786e-06, |
|
"loss": 0.2514, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.88888888888889e-06, |
|
"loss": 0.2637, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.880658436213993e-06, |
|
"loss": 0.241, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.872427983539095e-06, |
|
"loss": 0.2954, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 6.8641975308641985e-06, |
|
"loss": 0.2362, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.8559670781893e-06, |
|
"loss": 0.3149, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.847736625514404e-06, |
|
"loss": 0.2798, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.8395061728395065e-06, |
|
"loss": 0.2677, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.83127572016461e-06, |
|
"loss": 0.3053, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.823045267489713e-06, |
|
"loss": 0.2534, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.814814814814815e-06, |
|
"loss": 0.262, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.806584362139919e-06, |
|
"loss": 0.2628, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.798353909465021e-06, |
|
"loss": 0.2769, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.790123456790124e-06, |
|
"loss": 0.2958, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.781893004115227e-06, |
|
"loss": 0.2534, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.7736625514403295e-06, |
|
"loss": 0.3071, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.765432098765433e-06, |
|
"loss": 0.2658, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 6.757201646090536e-06, |
|
"loss": 0.2977, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.748971193415639e-06, |
|
"loss": 0.2461, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.740740740740741e-06, |
|
"loss": 0.2745, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.7325102880658445e-06, |
|
"loss": 0.2304, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.724279835390946e-06, |
|
"loss": 0.2657, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.71604938271605e-06, |
|
"loss": 0.2386, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.707818930041153e-06, |
|
"loss": 0.2398, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.699588477366256e-06, |
|
"loss": 0.2585, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.691358024691359e-06, |
|
"loss": 0.2314, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.683127572016461e-06, |
|
"loss": 0.277, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.674897119341565e-06, |
|
"loss": 0.2823, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 0.2378, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 6.65843621399177e-06, |
|
"loss": 0.2814, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.650205761316873e-06, |
|
"loss": 0.2841, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.6419753086419755e-06, |
|
"loss": 0.2474, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.633744855967079e-06, |
|
"loss": 0.2631, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.625514403292182e-06, |
|
"loss": 0.2627, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.617283950617285e-06, |
|
"loss": 0.2239, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.609053497942387e-06, |
|
"loss": 0.2745, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.6008230452674905e-06, |
|
"loss": 0.2687, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.592592592592592e-06, |
|
"loss": 0.2776, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.584362139917696e-06, |
|
"loss": 0.2357, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.576131687242799e-06, |
|
"loss": 0.366, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.567901234567902e-06, |
|
"loss": 0.2441, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.559670781893005e-06, |
|
"loss": 0.2377, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 6.551440329218107e-06, |
|
"loss": 0.2497, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.543209876543211e-06, |
|
"loss": 0.2438, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.534979423868313e-06, |
|
"loss": 0.2606, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.526748971193416e-06, |
|
"loss": 0.291, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.51851851851852e-06, |
|
"loss": 0.2583, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.5102880658436215e-06, |
|
"loss": 0.283, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.502057613168725e-06, |
|
"loss": 0.2787, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.493827160493828e-06, |
|
"loss": 0.2844, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.485596707818931e-06, |
|
"loss": 0.2465, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.477366255144033e-06, |
|
"loss": 0.2394, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.4691358024691365e-06, |
|
"loss": 0.2609, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.460905349794238e-06, |
|
"loss": 0.2292, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 6.452674897119342e-06, |
|
"loss": 0.2665, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.444444444444445e-06, |
|
"loss": 0.2704, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.436213991769548e-06, |
|
"loss": 0.3177, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.427983539094651e-06, |
|
"loss": 0.2499, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.419753086419753e-06, |
|
"loss": 0.2739, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.411522633744857e-06, |
|
"loss": 0.2745, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.403292181069959e-06, |
|
"loss": 0.2273, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.395061728395062e-06, |
|
"loss": 0.243, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.386831275720166e-06, |
|
"loss": 0.2467, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.3786008230452675e-06, |
|
"loss": 0.304, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.370370370370371e-06, |
|
"loss": 0.2474, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.362139917695474e-06, |
|
"loss": 0.2651, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.353909465020577e-06, |
|
"loss": 0.2351, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 6.345679012345679e-06, |
|
"loss": 0.2662, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.3374485596707825e-06, |
|
"loss": 0.2575, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.329218106995886e-06, |
|
"loss": 0.2544, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.320987654320988e-06, |
|
"loss": 0.2951, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.312757201646091e-06, |
|
"loss": 0.2719, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.304526748971194e-06, |
|
"loss": 0.2594, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.296296296296297e-06, |
|
"loss": 0.2644, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.288065843621399e-06, |
|
"loss": 0.2658, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.279835390946503e-06, |
|
"loss": 0.2566, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.271604938271606e-06, |
|
"loss": 0.2543, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.263374485596708e-06, |
|
"loss": 0.2477, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.255144032921812e-06, |
|
"loss": 0.2705, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 6.2469135802469135e-06, |
|
"loss": 0.243, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.238683127572017e-06, |
|
"loss": 0.2465, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.23045267489712e-06, |
|
"loss": 0.2333, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.222222222222223e-06, |
|
"loss": 0.2198, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.213991769547325e-06, |
|
"loss": 0.2459, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.2057613168724285e-06, |
|
"loss": 0.2639, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.197530864197532e-06, |
|
"loss": 0.2528, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.189300411522634e-06, |
|
"loss": 0.2856, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.181069958847737e-06, |
|
"loss": 0.2288, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.17283950617284e-06, |
|
"loss": 0.2705, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.164609053497943e-06, |
|
"loss": 0.2938, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.156378600823045e-06, |
|
"loss": 0.2778, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.148148148148149e-06, |
|
"loss": 0.2585, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 6.139917695473252e-06, |
|
"loss": 0.2379, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.131687242798354e-06, |
|
"loss": 0.2672, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.123456790123458e-06, |
|
"loss": 0.2544, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.1152263374485595e-06, |
|
"loss": 0.2563, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.106995884773663e-06, |
|
"loss": 0.2344, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.098765432098766e-06, |
|
"loss": 0.311, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.090534979423869e-06, |
|
"loss": 0.2623, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.082304526748972e-06, |
|
"loss": 0.2345, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.0740740740740745e-06, |
|
"loss": 0.2498, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.065843621399178e-06, |
|
"loss": 0.2848, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.05761316872428e-06, |
|
"loss": 0.2318, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.049382716049383e-06, |
|
"loss": 0.258, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 6.041152263374486e-06, |
|
"loss": 0.3022, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.032921810699589e-06, |
|
"loss": 0.2558, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.024691358024692e-06, |
|
"loss": 0.3015, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.016460905349795e-06, |
|
"loss": 0.2086, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6.008230452674898e-06, |
|
"loss": 0.2304, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 6e-06, |
|
"loss": 0.2591, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.991769547325104e-06, |
|
"loss": 0.2306, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.9835390946502055e-06, |
|
"loss": 0.2252, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.975308641975309e-06, |
|
"loss": 0.2824, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.967078189300412e-06, |
|
"loss": 0.2137, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.958847736625515e-06, |
|
"loss": 0.2446, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.950617283950618e-06, |
|
"loss": 0.2717, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.9423868312757205e-06, |
|
"loss": 0.2344, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 5.934156378600824e-06, |
|
"loss": 0.2899, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.925925925925926e-06, |
|
"loss": 0.302, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.917695473251029e-06, |
|
"loss": 0.2488, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.909465020576132e-06, |
|
"loss": 0.2415, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.901234567901235e-06, |
|
"loss": 0.2803, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.893004115226338e-06, |
|
"loss": 0.1996, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.884773662551441e-06, |
|
"loss": 0.2424, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.876543209876544e-06, |
|
"loss": 0.222, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.868312757201646e-06, |
|
"loss": 0.3162, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.86008230452675e-06, |
|
"loss": 0.2855, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.8518518518518515e-06, |
|
"loss": 0.275, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.843621399176955e-06, |
|
"loss": 0.2865, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.8353909465020585e-06, |
|
"loss": 0.2369, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.827160493827161e-06, |
|
"loss": 0.2382, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.818930041152264e-06, |
|
"loss": 0.27, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.8106995884773665e-06, |
|
"loss": 0.2703, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.80246913580247e-06, |
|
"loss": 0.25, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.794238683127572e-06, |
|
"loss": 0.2547, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.786008230452675e-06, |
|
"loss": 0.2467, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 0.2602, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.769547325102881e-06, |
|
"loss": 0.2528, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.761316872427984e-06, |
|
"loss": 0.2527, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.753086419753087e-06, |
|
"loss": 0.2861, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.74485596707819e-06, |
|
"loss": 0.2252, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.736625514403292e-06, |
|
"loss": 0.2899, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 5.728395061728396e-06, |
|
"loss": 0.2609, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.720164609053498e-06, |
|
"loss": 0.2716, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.711934156378601e-06, |
|
"loss": 0.2581, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.7037037037037045e-06, |
|
"loss": 0.2699, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.695473251028807e-06, |
|
"loss": 0.2363, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.68724279835391e-06, |
|
"loss": 0.2789, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.6790123456790125e-06, |
|
"loss": 0.2448, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.670781893004116e-06, |
|
"loss": 0.2535, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.662551440329218e-06, |
|
"loss": 0.2712, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.654320987654321e-06, |
|
"loss": 0.2785, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.646090534979425e-06, |
|
"loss": 0.267, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.6378600823045275e-06, |
|
"loss": 0.272, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.62962962962963e-06, |
|
"loss": 0.2355, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 5.621399176954733e-06, |
|
"loss": 0.2676, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.613168724279836e-06, |
|
"loss": 0.2593, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.604938271604938e-06, |
|
"loss": 0.2324, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.596707818930042e-06, |
|
"loss": 0.2456, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.588477366255145e-06, |
|
"loss": 0.2471, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.580246913580247e-06, |
|
"loss": 0.2658, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.5720164609053505e-06, |
|
"loss": 0.2509, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.563786008230453e-06, |
|
"loss": 0.2893, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 0.2642, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.5473251028806585e-06, |
|
"loss": 0.2983, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.539094650205762e-06, |
|
"loss": 0.2747, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.530864197530864e-06, |
|
"loss": 0.2233, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.522633744855967e-06, |
|
"loss": 0.2528, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.514403292181071e-06, |
|
"loss": 0.2838, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.5061728395061735e-06, |
|
"loss": 0.2569, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.497942386831276e-06, |
|
"loss": 0.2461, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.489711934156379e-06, |
|
"loss": 0.2687, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.481481481481482e-06, |
|
"loss": 0.2651, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.473251028806584e-06, |
|
"loss": 0.2372, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.465020576131688e-06, |
|
"loss": 0.2937, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.456790123456791e-06, |
|
"loss": 0.2497, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.448559670781893e-06, |
|
"loss": 0.27, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.4403292181069965e-06, |
|
"loss": 0.2497, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.432098765432099e-06, |
|
"loss": 0.2759, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.423868312757203e-06, |
|
"loss": 0.2654, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 5.4156378600823045e-06, |
|
"loss": 0.2582, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.407407407407408e-06, |
|
"loss": 0.2627, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.3991769547325115e-06, |
|
"loss": 0.2594, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.390946502057613e-06, |
|
"loss": 0.2633, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.382716049382717e-06, |
|
"loss": 0.2781, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.3744855967078195e-06, |
|
"loss": 0.2156, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.366255144032922e-06, |
|
"loss": 0.2623, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.358024691358025e-06, |
|
"loss": 0.2532, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.349794238683128e-06, |
|
"loss": 0.2402, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.341563786008232e-06, |
|
"loss": 0.2577, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 0.2353, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.325102880658437e-06, |
|
"loss": 0.2861, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.316872427983539e-06, |
|
"loss": 0.2117, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.3086419753086425e-06, |
|
"loss": 0.2399, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.300411522633745e-06, |
|
"loss": 0.251, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.292181069958849e-06, |
|
"loss": 0.2707, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.2839506172839505e-06, |
|
"loss": 0.2547, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.275720164609054e-06, |
|
"loss": 0.2431, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.2674897119341575e-06, |
|
"loss": 0.26, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.259259259259259e-06, |
|
"loss": 0.27, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.251028806584363e-06, |
|
"loss": 0.2737, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.2427983539094655e-06, |
|
"loss": 0.2615, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.234567901234568e-06, |
|
"loss": 0.2572, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.226337448559671e-06, |
|
"loss": 0.2597, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.218106995884774e-06, |
|
"loss": 0.2537, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 5.209876543209878e-06, |
|
"loss": 0.252, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.20164609053498e-06, |
|
"loss": 0.2793, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.193415637860083e-06, |
|
"loss": 0.2711, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.185185185185185e-06, |
|
"loss": 0.2445, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.1769547325102885e-06, |
|
"loss": 0.266, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.168724279835391e-06, |
|
"loss": 0.2833, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.160493827160495e-06, |
|
"loss": 0.2508, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.152263374485597e-06, |
|
"loss": 0.2998, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.1440329218107e-06, |
|
"loss": 0.2842, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.1358024691358035e-06, |
|
"loss": 0.2595, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.127572016460905e-06, |
|
"loss": 0.2716, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.119341563786009e-06, |
|
"loss": 0.2879, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 0.2694, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.102880658436214e-06, |
|
"loss": 0.2453, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.094650205761317e-06, |
|
"loss": 0.2823, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.08641975308642e-06, |
|
"loss": 0.2519, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.078189300411524e-06, |
|
"loss": 0.2511, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.069958847736626e-06, |
|
"loss": 0.2627, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.061728395061729e-06, |
|
"loss": 0.3092, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.053497942386831e-06, |
|
"loss": 0.2493, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.0452674897119345e-06, |
|
"loss": 0.271, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.037037037037037e-06, |
|
"loss": 0.2664, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.028806584362141e-06, |
|
"loss": 0.2246, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.020576131687243e-06, |
|
"loss": 0.2533, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.012345679012346e-06, |
|
"loss": 0.2793, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.0041152263374495e-06, |
|
"loss": 0.2333, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.995884773662552e-06, |
|
"loss": 0.2839, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.987654320987655e-06, |
|
"loss": 0.2498, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.9794238683127575e-06, |
|
"loss": 0.2473, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.97119341563786e-06, |
|
"loss": 0.2323, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.962962962962964e-06, |
|
"loss": 0.3006, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.954732510288066e-06, |
|
"loss": 0.2506, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.946502057613169e-06, |
|
"loss": 0.2769, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.938271604938272e-06, |
|
"loss": 0.2464, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.930041152263375e-06, |
|
"loss": 0.2727, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.921810699588478e-06, |
|
"loss": 0.2357, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.9135802469135805e-06, |
|
"loss": 0.2728, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 4.905349794238683e-06, |
|
"loss": 0.2477, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.897119341563787e-06, |
|
"loss": 0.2493, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 0.284, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.880658436213992e-06, |
|
"loss": 0.2573, |
|
"step": 1913 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.8724279835390955e-06, |
|
"loss": 0.2838, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.864197530864198e-06, |
|
"loss": 0.2588, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.855967078189301e-06, |
|
"loss": 0.2998, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.8477366255144035e-06, |
|
"loss": 0.2665, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.839506172839506e-06, |
|
"loss": 0.2754, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.83127572016461e-06, |
|
"loss": 0.2484, |
|
"step": 1919 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.823045267489712e-06, |
|
"loss": 0.2821, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.814814814814815e-06, |
|
"loss": 0.2555, |
|
"step": 1921 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.8065843621399185e-06, |
|
"loss": 0.2807, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 4.798353909465021e-06, |
|
"loss": 0.2413, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.790123456790124e-06, |
|
"loss": 0.2827, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.7818930041152265e-06, |
|
"loss": 0.258, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.773662551440329e-06, |
|
"loss": 0.2261, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.765432098765433e-06, |
|
"loss": 0.243, |
|
"step": 1927 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.757201646090535e-06, |
|
"loss": 0.2352, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.748971193415639e-06, |
|
"loss": 0.24, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.7407407407407415e-06, |
|
"loss": 0.2931, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.732510288065844e-06, |
|
"loss": 0.2676, |
|
"step": 1931 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.724279835390947e-06, |
|
"loss": 0.3159, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.7160493827160495e-06, |
|
"loss": 0.2811, |
|
"step": 1933 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.707818930041152e-06, |
|
"loss": 0.2532, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 4.699588477366256e-06, |
|
"loss": 0.2756, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.691358024691358e-06, |
|
"loss": 0.27, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.683127572016462e-06, |
|
"loss": 0.2452, |
|
"step": 1937 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6748971193415645e-06, |
|
"loss": 0.2569, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 0.2983, |
|
"step": 1939 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.65843621399177e-06, |
|
"loss": 0.2712, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6502057613168725e-06, |
|
"loss": 0.382, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.641975308641975e-06, |
|
"loss": 0.2679, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.633744855967079e-06, |
|
"loss": 0.2697, |
|
"step": 1943 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.625514403292181e-06, |
|
"loss": 0.2801, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.617283950617285e-06, |
|
"loss": 0.2229, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.6090534979423875e-06, |
|
"loss": 0.2478, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.60082304526749e-06, |
|
"loss": 0.2377, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 4.592592592592593e-06, |
|
"loss": 0.3088, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.5843621399176955e-06, |
|
"loss": 0.2409, |
|
"step": 1949 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.576131687242798e-06, |
|
"loss": 0.2787, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.567901234567902e-06, |
|
"loss": 0.2176, |
|
"step": 1951 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.559670781893004e-06, |
|
"loss": 0.3024, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.551440329218108e-06, |
|
"loss": 0.2576, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.5432098765432105e-06, |
|
"loss": 0.259, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.534979423868313e-06, |
|
"loss": 0.288, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.526748971193416e-06, |
|
"loss": 0.2489, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.5185185185185185e-06, |
|
"loss": 0.2566, |
|
"step": 1957 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.510288065843621e-06, |
|
"loss": 0.2359, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.502057613168725e-06, |
|
"loss": 0.2746, |
|
"step": 1959 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 4.493827160493827e-06, |
|
"loss": 0.2377, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.485596707818931e-06, |
|
"loss": 0.2431, |
|
"step": 1961 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.4773662551440335e-06, |
|
"loss": 0.2478, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.469135802469136e-06, |
|
"loss": 0.288, |
|
"step": 1963 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.460905349794239e-06, |
|
"loss": 0.2531, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.4526748971193415e-06, |
|
"loss": 0.24, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.444444444444444e-06, |
|
"loss": 0.2583, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.436213991769548e-06, |
|
"loss": 0.3077, |
|
"step": 1967 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.42798353909465e-06, |
|
"loss": 0.2674, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.419753086419754e-06, |
|
"loss": 0.2602, |
|
"step": 1969 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.4115226337448565e-06, |
|
"loss": 0.2894, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.403292181069959e-06, |
|
"loss": 0.3005, |
|
"step": 1971 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.395061728395062e-06, |
|
"loss": 0.2807, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 4.3868312757201645e-06, |
|
"loss": 0.285, |
|
"step": 1973 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.378600823045268e-06, |
|
"loss": 0.2473, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.370370370370371e-06, |
|
"loss": 0.2729, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.362139917695473e-06, |
|
"loss": 0.2445, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.353909465020577e-06, |
|
"loss": 0.2545, |
|
"step": 1977 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.3456790123456795e-06, |
|
"loss": 0.2791, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.337448559670782e-06, |
|
"loss": 0.2772, |
|
"step": 1979 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.329218106995885e-06, |
|
"loss": 0.2498, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.3209876543209875e-06, |
|
"loss": 0.2375, |
|
"step": 1981 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.312757201646091e-06, |
|
"loss": 0.269, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.304526748971194e-06, |
|
"loss": 0.2423, |
|
"step": 1983 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.296296296296296e-06, |
|
"loss": 0.277, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.2880658436214e-06, |
|
"loss": 0.2989, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 4.2798353909465025e-06, |
|
"loss": 0.2781, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.271604938271605e-06, |
|
"loss": 0.2403, |
|
"step": 1987 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.263374485596708e-06, |
|
"loss": 0.2797, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.2551440329218105e-06, |
|
"loss": 0.3414, |
|
"step": 1989 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.246913580246914e-06, |
|
"loss": 0.273, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.238683127572017e-06, |
|
"loss": 0.2491, |
|
"step": 1991 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.23045267489712e-06, |
|
"loss": 0.2778, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 0.2598, |
|
"step": 1993 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.2139917695473255e-06, |
|
"loss": 0.2426, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.205761316872428e-06, |
|
"loss": 0.2425, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.197530864197531e-06, |
|
"loss": 0.2548, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.189300411522634e-06, |
|
"loss": 0.2949, |
|
"step": 1997 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 4.181069958847737e-06, |
|
"loss": 0.2693, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.17283950617284e-06, |
|
"loss": 0.2733, |
|
"step": 1999 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 4.164609053497943e-06, |
|
"loss": 0.2524, |
|
"step": 2000 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 2506, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"total_flos": 0.0, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |