|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 14.679611650485437,
  "global_step": 630,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.02, "learning_rate": 2.631578947368421e-06, "loss": 1.9044, "step": 1},
    {"epoch": 0.05, "learning_rate": 5.263157894736842e-06, "loss": 2.018, "step": 2},
    {"epoch": 0.07, "learning_rate": 7.894736842105263e-06, "loss": 2.0221, "step": 3},
    {"epoch": 0.09, "learning_rate": 1.0526315789473684e-05, "loss": 1.8884, "step": 4},
    {"epoch": 0.12, "learning_rate": 1.3157894736842106e-05, "loss": 1.9326, "step": 5},
    {"epoch": 0.14, "learning_rate": 1.5789473684210526e-05, "loss": 2.1193, "step": 6},
    {"epoch": 0.16, "learning_rate": 1.8421052631578947e-05, "loss": 2.0827, "step": 7},
    {"epoch": 0.19, "learning_rate": 2.105263157894737e-05, "loss": 1.8846, "step": 8},
    {"epoch": 0.21, "learning_rate": 2.368421052631579e-05, "loss": 2.0943, "step": 9},
    {"epoch": 0.23, "learning_rate": 2.6315789473684212e-05, "loss": 2.0881, "step": 10},
    {"epoch": 0.26, "learning_rate": 2.8947368421052634e-05, "loss": 1.9275, "step": 11},
    {"epoch": 0.28, "learning_rate": 3.157894736842105e-05, "loss": 1.6874, "step": 12},
    {"epoch": 0.3, "learning_rate": 3.421052631578947e-05, "loss": 1.9874, "step": 13},
    {"epoch": 0.33, "learning_rate": 3.6842105263157895e-05, "loss": 1.8901, "step": 14},
    {"epoch": 0.35, "learning_rate": 3.9473684210526316e-05, "loss": 1.7683, "step": 15},
    {"epoch": 0.37, "learning_rate": 4.210526315789474e-05, "loss": 2.0108, "step": 16},
    {"epoch": 0.4, "learning_rate": 4.473684210526316e-05, "loss": 1.6579, "step": 17},
    {"epoch": 0.42, "learning_rate": 4.736842105263158e-05, "loss": 1.9758, "step": 18},
    {"epoch": 0.44, "learning_rate": 5e-05, "loss": 1.9118, "step": 19},
    {"epoch": 0.47, "learning_rate": 5.2631578947368424e-05, "loss": 1.8964, "step": 20},
    {"epoch": 0.49, "learning_rate": 5.526315789473685e-05, "loss": 1.9288, "step": 21},
    {"epoch": 0.51, "learning_rate": 5.789473684210527e-05, "loss": 1.8706, "step": 22},
    {"epoch": 0.54, "learning_rate": 6.052631578947369e-05, "loss": 1.8604, "step": 23},
    {"epoch": 0.56, "learning_rate": 6.31578947368421e-05, "loss": 1.9001, "step": 24},
    {"epoch": 0.58, "learning_rate": 6.578947368421054e-05, "loss": 1.8254, "step": 25},
    {"epoch": 0.61, "learning_rate": 6.842105263157895e-05, "loss": 1.8111, "step": 26},
    {"epoch": 0.63, "learning_rate": 7.105263157894737e-05, "loss": 1.7496, "step": 27},
    {"epoch": 0.65, "learning_rate": 7.368421052631579e-05, "loss": 1.7604, "step": 28},
    {"epoch": 0.68, "learning_rate": 7.631578947368422e-05, "loss": 1.6285, "step": 29},
    {"epoch": 0.7, "learning_rate": 7.894736842105263e-05, "loss": 1.7226, "step": 30},
    {"epoch": 0.72, "learning_rate": 8.157894736842105e-05, "loss": 1.9689, "step": 31},
    {"epoch": 0.75, "learning_rate": 8.421052631578948e-05, "loss": 1.7737, "step": 32},
    {"epoch": 0.77, "learning_rate": 8.68421052631579e-05, "loss": 1.8093, "step": 33},
    {"epoch": 0.79, "learning_rate": 8.947368421052632e-05, "loss": 1.6292, "step": 34},
    {"epoch": 0.82, "learning_rate": 9.210526315789474e-05, "loss": 1.8351, "step": 35},
    {"epoch": 0.84, "learning_rate": 9.473684210526316e-05, "loss": 1.918, "step": 36},
    {"epoch": 0.86, "learning_rate": 9.736842105263158e-05, "loss": 1.8161, "step": 37},
    {"epoch": 0.89, "learning_rate": 0.0001, "loss": 1.8184, "step": 38},
    {"epoch": 0.91, "learning_rate": 9.983108108108109e-05, "loss": 1.5839, "step": 39},
    {"epoch": 0.93, "learning_rate": 9.966216216216217e-05, "loss": 1.8736, "step": 40},
    {"epoch": 0.96, "learning_rate": 9.949324324324325e-05, "loss": 1.77, "step": 41},
    {"epoch": 0.98, "learning_rate": 9.932432432432433e-05, "loss": 1.7912, "step": 42},
    {"epoch": 1.0, "learning_rate": 9.915540540540541e-05, "loss": 1.6989, "step": 43},
    {"epoch": 1.03, "learning_rate": 9.89864864864865e-05, "loss": 1.7327, "step": 44},
    {"epoch": 1.05, "learning_rate": 9.881756756756757e-05, "loss": 1.7799, "step": 45},
    {"epoch": 1.07, "learning_rate": 9.864864864864865e-05, "loss": 1.8298, "step": 46},
    {"epoch": 1.1, "learning_rate": 9.847972972972973e-05, "loss": 1.8616, "step": 47},
    {"epoch": 1.12, "learning_rate": 9.831081081081081e-05, "loss": 1.7984, "step": 48},
    {"epoch": 1.14, "learning_rate": 9.81418918918919e-05, "loss": 1.7626, "step": 49},
    {"epoch": 1.17, "learning_rate": 9.797297297297297e-05, "loss": 1.8424, "step": 50},
    {"epoch": 1.19, "learning_rate": 9.780405405405407e-05, "loss": 1.6436, "step": 51},
    {"epoch": 1.21, "learning_rate": 9.763513513513513e-05, "loss": 1.6303, "step": 52},
    {"epoch": 1.23, "learning_rate": 9.746621621621623e-05, "loss": 1.7199, "step": 53},
    {"epoch": 1.26, "learning_rate": 9.729729729729731e-05, "loss": 1.6789, "step": 54},
    {"epoch": 1.28, "learning_rate": 9.712837837837838e-05, "loss": 1.8519, "step": 55},
    {"epoch": 1.3, "learning_rate": 9.695945945945947e-05, "loss": 1.7961, "step": 56},
    {"epoch": 1.33, "learning_rate": 9.679054054054054e-05, "loss": 1.6725, "step": 57},
    {"epoch": 1.35, "learning_rate": 9.662162162162163e-05, "loss": 1.7249, "step": 58},
    {"epoch": 1.37, "learning_rate": 9.64527027027027e-05, "loss": 1.5996, "step": 59},
    {"epoch": 1.4, "learning_rate": 9.628378378378379e-05, "loss": 1.7033, "step": 60},
    {"epoch": 1.42, "learning_rate": 9.611486486486487e-05, "loss": 1.7387, "step": 61},
    {"epoch": 1.44, "learning_rate": 9.594594594594595e-05, "loss": 1.7804, "step": 62},
    {"epoch": 1.47, "learning_rate": 9.577702702702703e-05, "loss": 1.7484, "step": 63},
    {"epoch": 1.49, "learning_rate": 9.56081081081081e-05, "loss": 1.7526, "step": 64},
    {"epoch": 1.51, "learning_rate": 9.54391891891892e-05, "loss": 1.7247, "step": 65},
    {"epoch": 1.54, "learning_rate": 9.527027027027028e-05, "loss": 1.6276, "step": 66},
    {"epoch": 1.56, "learning_rate": 9.510135135135136e-05, "loss": 1.8279, "step": 67},
    {"epoch": 1.58, "learning_rate": 9.493243243243244e-05, "loss": 1.7067, "step": 68},
    {"epoch": 1.61, "learning_rate": 9.476351351351352e-05, "loss": 1.7865, "step": 69},
    {"epoch": 1.63, "learning_rate": 9.45945945945946e-05, "loss": 1.6693, "step": 70},
    {"epoch": 1.65, "learning_rate": 9.442567567567568e-05, "loss": 1.6939, "step": 71},
    {"epoch": 1.68, "learning_rate": 9.425675675675676e-05, "loss": 1.8667, "step": 72},
    {"epoch": 1.7, "learning_rate": 9.408783783783784e-05, "loss": 1.746, "step": 73},
    {"epoch": 1.72, "learning_rate": 9.391891891891892e-05, "loss": 1.6951, "step": 74},
    {"epoch": 1.75, "learning_rate": 9.375e-05, "loss": 1.6749, "step": 75},
    {"epoch": 1.77, "learning_rate": 9.35810810810811e-05, "loss": 1.8773, "step": 76},
    {"epoch": 1.79, "learning_rate": 9.341216216216216e-05, "loss": 1.8064, "step": 77},
    {"epoch": 1.82, "learning_rate": 9.324324324324324e-05, "loss": 1.6596, "step": 78},
    {"epoch": 1.84, "learning_rate": 9.307432432432432e-05, "loss": 1.7321, "step": 79},
    {"epoch": 1.86, "learning_rate": 9.29054054054054e-05, "loss": 1.7029, "step": 80},
    {"epoch": 1.89, "learning_rate": 9.27364864864865e-05, "loss": 1.7392, "step": 81},
    {"epoch": 1.91, "learning_rate": 9.256756756756757e-05, "loss": 1.7904, "step": 82},
    {"epoch": 1.93, "learning_rate": 9.239864864864866e-05, "loss": 1.6794, "step": 83},
    {"epoch": 1.96, "learning_rate": 9.222972972972973e-05, "loss": 1.8827, "step": 84},
    {"epoch": 1.98, "learning_rate": 9.206081081081082e-05, "loss": 1.7051, "step": 85},
    {"epoch": 2.0, "learning_rate": 9.18918918918919e-05, "loss": 1.7069, "step": 86},
    {"epoch": 2.03, "learning_rate": 9.172297297297297e-05, "loss": 1.8051, "step": 87},
    {"epoch": 2.05, "learning_rate": 9.155405405405406e-05, "loss": 1.6665, "step": 88},
    {"epoch": 2.07, "learning_rate": 9.138513513513513e-05, "loss": 1.6218, "step": 89},
    {"epoch": 2.1, "learning_rate": 9.121621621621623e-05, "loss": 1.5848, "step": 90},
    {"epoch": 2.12, "learning_rate": 9.10472972972973e-05, "loss": 1.7544, "step": 91},
    {"epoch": 2.14, "learning_rate": 9.087837837837839e-05, "loss": 1.6454, "step": 92},
    {"epoch": 2.17, "learning_rate": 9.070945945945947e-05, "loss": 1.8591, "step": 93},
    {"epoch": 2.19, "learning_rate": 9.054054054054055e-05, "loss": 1.7025, "step": 94},
    {"epoch": 2.21, "learning_rate": 9.037162162162163e-05, "loss": 1.7824, "step": 95},
    {"epoch": 2.24, "learning_rate": 9.02027027027027e-05, "loss": 1.6049, "step": 96},
    {"epoch": 2.26, "learning_rate": 9.003378378378379e-05, "loss": 1.6961, "step": 97},
    {"epoch": 2.28, "learning_rate": 8.986486486486487e-05, "loss": 1.8705, "step": 98},
    {"epoch": 2.31, "learning_rate": 8.969594594594595e-05, "loss": 1.4498, "step": 99},
    {"epoch": 2.33, "learning_rate": 8.952702702702703e-05, "loss": 1.6967, "step": 100},
    {"epoch": 2.35, "learning_rate": 8.935810810810811e-05, "loss": 1.6345, "step": 101},
    {"epoch": 2.38, "learning_rate": 8.918918918918919e-05, "loss": 1.8037, "step": 102},
    {"epoch": 2.4, "learning_rate": 8.902027027027027e-05, "loss": 1.7896, "step": 103},
    {"epoch": 2.42, "learning_rate": 8.885135135135135e-05, "loss": 1.751, "step": 104},
    {"epoch": 2.45, "learning_rate": 8.868243243243243e-05, "loss": 1.482, "step": 105},
    {"epoch": 2.47, "learning_rate": 8.851351351351352e-05, "loss": 1.6049, "step": 106},
    {"epoch": 2.49, "learning_rate": 8.83445945945946e-05, "loss": 1.6029, "step": 107},
    {"epoch": 2.52, "learning_rate": 8.817567567567569e-05, "loss": 1.875, "step": 108},
    {"epoch": 2.54, "learning_rate": 8.800675675675676e-05, "loss": 1.5798, "step": 109},
    {"epoch": 2.56, "learning_rate": 8.783783783783784e-05, "loss": 1.4883, "step": 110},
    {"epoch": 2.59, "learning_rate": 8.766891891891892e-05, "loss": 1.7249, "step": 111},
    {"epoch": 2.61, "learning_rate": 8.75e-05, "loss": 1.5086, "step": 112},
    {"epoch": 2.63, "learning_rate": 8.73310810810811e-05, "loss": 1.39, "step": 113},
    {"epoch": 2.66, "learning_rate": 8.716216216216216e-05, "loss": 1.7481, "step": 114},
    {"epoch": 2.68, "learning_rate": 8.699324324324325e-05, "loss": 1.6514, "step": 115},
    {"epoch": 2.7, "learning_rate": 8.682432432432432e-05, "loss": 1.8731, "step": 116},
    {"epoch": 2.73, "learning_rate": 8.665540540540542e-05, "loss": 1.6415, "step": 117},
    {"epoch": 2.75, "learning_rate": 8.64864864864865e-05, "loss": 1.7659, "step": 118},
    {"epoch": 2.77, "learning_rate": 8.631756756756756e-05, "loss": 1.5691, "step": 119},
    {"epoch": 2.8, "learning_rate": 8.614864864864866e-05, "loss": 1.6315, "step": 120},
    {"epoch": 2.82, "learning_rate": 8.597972972972972e-05, "loss": 1.367, "step": 121},
    {"epoch": 2.84, "learning_rate": 8.581081081081082e-05, "loss": 1.6088, "step": 122},
    {"epoch": 2.87, "learning_rate": 8.56418918918919e-05, "loss": 1.7598, "step": 123},
    {"epoch": 2.89, "learning_rate": 8.547297297297298e-05, "loss": 1.6255, "step": 124},
    {"epoch": 2.91, "learning_rate": 8.530405405405406e-05, "loss": 1.5739, "step": 125},
    {"epoch": 2.94, "learning_rate": 8.513513513513514e-05, "loss": 1.5542, "step": 126},
    {"epoch": 2.96, "learning_rate": 8.496621621621622e-05, "loss": 1.5758, "step": 127},
    {"epoch": 2.98, "learning_rate": 8.47972972972973e-05, "loss": 1.668, "step": 128},
    {"epoch": 3.01, "learning_rate": 8.462837837837838e-05, "loss": 1.6283, "step": 129},
    {"epoch": 3.03, "learning_rate": 8.445945945945946e-05, "loss": 1.6111, "step": 130},
    {"epoch": 3.05, "learning_rate": 8.429054054054054e-05, "loss": 1.8061, "step": 131},
    {"epoch": 3.08, "learning_rate": 8.412162162162163e-05, "loss": 1.4882, "step": 132},
    {"epoch": 3.1, "learning_rate": 8.39527027027027e-05, "loss": 1.592, "step": 133},
    {"epoch": 3.12, "learning_rate": 8.378378378378379e-05, "loss": 1.638, "step": 134},
    {"epoch": 3.15, "learning_rate": 8.361486486486487e-05, "loss": 1.5036, "step": 135},
    {"epoch": 3.17, "learning_rate": 8.344594594594595e-05, "loss": 1.6721, "step": 136},
    {"epoch": 3.19, "learning_rate": 8.327702702702703e-05, "loss": 1.7742, "step": 137},
    {"epoch": 3.22, "learning_rate": 8.310810810810811e-05, "loss": 1.484, "step": 138},
    {"epoch": 3.24, "learning_rate": 8.293918918918919e-05, "loss": 1.3698, "step": 139},
    {"epoch": 3.26, "learning_rate": 8.277027027027028e-05, "loss": 1.5644, "step": 140},
    {"epoch": 3.29, "learning_rate": 8.260135135135135e-05, "loss": 1.6195, "step": 141},
    {"epoch": 3.31, "learning_rate": 8.243243243243243e-05, "loss": 1.5031, "step": 142},
    {"epoch": 3.33, "learning_rate": 8.226351351351351e-05, "loss": 1.5403, "step": 143},
    {"epoch": 3.36, "learning_rate": 8.209459459459459e-05, "loss": 1.57, "step": 144},
    {"epoch": 3.38, "learning_rate": 8.192567567567569e-05, "loss": 1.6454, "step": 145},
    {"epoch": 3.4, "learning_rate": 8.175675675675675e-05, "loss": 1.5069, "step": 146},
    {"epoch": 3.43, "learning_rate": 8.158783783783785e-05, "loss": 1.3782, "step": 147},
    {"epoch": 3.45, "learning_rate": 8.141891891891892e-05, "loss": 1.4296, "step": 148},
    {"epoch": 3.47, "learning_rate": 8.125000000000001e-05, "loss": 1.4303, "step": 149},
    {"epoch": 3.5, "learning_rate": 8.108108108108109e-05, "loss": 1.6368, "step": 150},
    {"epoch": 3.52, "learning_rate": 8.091216216216216e-05, "loss": 1.5951, "step": 151},
    {"epoch": 3.54, "learning_rate": 8.074324324324325e-05, "loss": 1.5769, "step": 152},
    {"epoch": 3.57, "learning_rate": 8.057432432432432e-05, "loss": 1.482, "step": 153},
    {"epoch": 3.59, "learning_rate": 8.040540540540541e-05, "loss": 1.5415, "step": 154},
    {"epoch": 3.61, "learning_rate": 8.02364864864865e-05, "loss": 1.6084, "step": 155},
    {"epoch": 3.63, "learning_rate": 8.006756756756757e-05, "loss": 1.5483, "step": 156},
    {"epoch": 3.66, "learning_rate": 7.989864864864865e-05, "loss": 1.4329, "step": 157},
    {"epoch": 3.68, "learning_rate": 7.972972972972974e-05, "loss": 1.6068, "step": 158},
    {"epoch": 3.7, "learning_rate": 7.956081081081082e-05, "loss": 1.5519, "step": 159},
    {"epoch": 3.73, "learning_rate": 7.93918918918919e-05, "loss": 1.7387, "step": 160},
    {"epoch": 3.75, "learning_rate": 7.922297297297298e-05, "loss": 1.5583, "step": 161},
    {"epoch": 3.77, "learning_rate": 7.905405405405406e-05, "loss": 1.5003, "step": 162},
    {"epoch": 3.8, "learning_rate": 7.888513513513514e-05, "loss": 1.5025, "step": 163},
    {"epoch": 3.82, "learning_rate": 7.871621621621622e-05, "loss": 1.5322, "step": 164},
    {"epoch": 3.84, "learning_rate": 7.85472972972973e-05, "loss": 1.6915, "step": 165},
    {"epoch": 3.87, "learning_rate": 7.837837837837838e-05, "loss": 1.4906, "step": 166},
    {"epoch": 3.89, "learning_rate": 7.820945945945946e-05, "loss": 1.4796, "step": 167},
    {"epoch": 3.91, "learning_rate": 7.804054054054054e-05, "loss": 1.5712, "step": 168},
    {"epoch": 3.94, "learning_rate": 7.787162162162162e-05, "loss": 1.459, "step": 169},
    {"epoch": 3.96, "learning_rate": 7.77027027027027e-05, "loss": 1.4917, "step": 170},
    {"epoch": 3.98, "learning_rate": 7.753378378378378e-05, "loss": 1.5149, "step": 171},
    {"epoch": 4.01, "learning_rate": 7.736486486486488e-05, "loss": 1.4344, "step": 172},
    {"epoch": 4.03, "learning_rate": 7.719594594594595e-05, "loss": 1.4152, "step": 173},
    {"epoch": 4.05, "learning_rate": 7.702702702702703e-05, "loss": 1.3729, "step": 174},
    {"epoch": 4.08, "learning_rate": 7.68581081081081e-05, "loss": 1.4462, "step": 175},
    {"epoch": 4.1, "learning_rate": 7.668918918918919e-05, "loss": 1.5449, "step": 176},
    {"epoch": 4.12, "learning_rate": 7.652027027027028e-05, "loss": 1.2145, "step": 177},
    {"epoch": 4.15, "learning_rate": 7.635135135135135e-05, "loss": 1.4754, "step": 178},
    {"epoch": 4.17, "learning_rate": 7.618243243243244e-05, "loss": 1.3256, "step": 179},
    {"epoch": 4.19, "learning_rate": 7.601351351351351e-05, "loss": 1.3754, "step": 180},
    {"epoch": 4.22, "learning_rate": 7.58445945945946e-05, "loss": 1.4267, "step": 181},
    {"epoch": 4.24, "learning_rate": 7.567567567567568e-05, "loss": 1.4389, "step": 182},
    {"epoch": 4.26, "learning_rate": 7.550675675675675e-05, "loss": 1.4266, "step": 183},
    {"epoch": 4.29, "learning_rate": 7.533783783783785e-05, "loss": 1.4098, "step": 184},
    {"epoch": 4.31, "learning_rate": 7.516891891891891e-05, "loss": 1.0715, "step": 185},
    {"epoch": 4.33, "learning_rate": 7.500000000000001e-05, "loss": 1.4547, "step": 186},
    {"epoch": 4.36, "learning_rate": 7.483108108108109e-05, "loss": 1.5758, "step": 187},
    {"epoch": 4.38, "learning_rate": 7.466216216216217e-05, "loss": 1.3765, "step": 188},
    {"epoch": 4.4, "learning_rate": 7.449324324324325e-05, "loss": 1.4207, "step": 189},
    {"epoch": 4.43, "learning_rate": 7.432432432432433e-05, "loss": 1.4264, "step": 190},
    {"epoch": 4.45, "learning_rate": 7.415540540540541e-05, "loss": 1.2975, "step": 191},
    {"epoch": 4.47, "learning_rate": 7.398648648648649e-05, "loss": 1.5207, "step": 192},
    {"epoch": 4.5, "learning_rate": 7.381756756756757e-05, "loss": 1.5805, "step": 193},
    {"epoch": 4.52, "learning_rate": 7.364864864864865e-05, "loss": 1.3546, "step": 194},
    {"epoch": 4.54, "learning_rate": 7.347972972972973e-05, "loss": 1.6047, "step": 195},
    {"epoch": 4.57, "learning_rate": 7.331081081081081e-05, "loss": 1.4313, "step": 196},
    {"epoch": 4.59, "learning_rate": 7.31418918918919e-05, "loss": 1.371, "step": 197},
    {"epoch": 4.61, "learning_rate": 7.297297297297297e-05, "loss": 1.4083, "step": 198},
    {"epoch": 4.64, "learning_rate": 7.280405405405406e-05, "loss": 1.6423, "step": 199},
    {"epoch": 4.66, "learning_rate": 7.263513513513514e-05, "loss": 1.4229, "step": 200},
    {"epoch": 4.68, "learning_rate": 7.246621621621622e-05, "loss": 1.4711, "step": 201},
    {"epoch": 4.71, "learning_rate": 7.229729729729731e-05, "loss": 1.3806, "step": 202},
    {"epoch": 4.73, "learning_rate": 7.212837837837838e-05, "loss": 1.4641, "step": 203},
    {"epoch": 4.75, "learning_rate": 7.195945945945947e-05, "loss": 1.6534, "step": 204},
    {"epoch": 4.78, "learning_rate": 7.179054054054054e-05, "loss": 1.3054, "step": 205},
    {"epoch": 4.8, "learning_rate": 7.162162162162162e-05, "loss": 1.4058, "step": 206},
    {"epoch": 4.82, "learning_rate": 7.14527027027027e-05, "loss": 1.5076, "step": 207},
    {"epoch": 4.85, "learning_rate": 7.128378378378378e-05, "loss": 1.5715, "step": 208},
    {"epoch": 4.87, "learning_rate": 7.111486486486488e-05, "loss": 1.4723, "step": 209},
    {"epoch": 4.89, "learning_rate": 7.094594594594594e-05, "loss": 1.3515, "step": 210},
    {"epoch": 4.92, "learning_rate": 7.077702702702704e-05, "loss": 1.2815, "step": 211},
    {"epoch": 4.94, "learning_rate": 7.06081081081081e-05, "loss": 1.2335, "step": 212},
    {"epoch": 4.96, "learning_rate": 7.04391891891892e-05, "loss": 1.2843, "step": 213},
    {"epoch": 4.99, "learning_rate": 7.027027027027028e-05, "loss": 1.518, "step": 214},
    {"epoch": 5.01, "learning_rate": 7.010135135135135e-05, "loss": 1.3281, "step": 215},
    {"epoch": 5.03, "learning_rate": 6.993243243243244e-05, "loss": 1.3571, "step": 216},
    {"epoch": 5.06, "learning_rate": 6.97635135135135e-05, "loss": 1.2656, "step": 217},
    {"epoch": 5.08, "learning_rate": 6.95945945945946e-05, "loss": 1.3251, "step": 218},
    {"epoch": 5.1, "learning_rate": 6.942567567567568e-05, "loss": 1.2875, "step": 219},
    {"epoch": 5.13, "learning_rate": 6.925675675675676e-05, "loss": 1.4429, "step": 220},
    {"epoch": 5.15, "learning_rate": 6.908783783783784e-05, "loss": 1.1838, "step": 221},
    {"epoch": 5.17, "learning_rate": 6.891891891891892e-05, "loss": 1.1694, "step": 222},
    {"epoch": 5.2, "learning_rate": 6.875e-05, "loss": 1.2747, "step": 223},
    {"epoch": 5.22, "learning_rate": 6.858108108108108e-05, "loss": 1.3806, "step": 224},
    {"epoch": 5.24, "learning_rate": 6.841216216216217e-05, "loss": 1.1208, "step": 225},
    {"epoch": 5.27, "learning_rate": 6.824324324324325e-05, "loss": 1.4053, "step": 226},
    {"epoch": 5.29, "learning_rate": 6.807432432432433e-05, "loss": 1.3146, "step": 227},
    {"epoch": 5.31, "learning_rate": 6.790540540540541e-05, "loss": 1.3767, "step": 228},
    {"epoch": 5.34, "learning_rate": 6.773648648648649e-05, "loss": 1.2501, "step": 229},
    {"epoch": 5.36, "learning_rate": 6.756756756756757e-05, "loss": 1.3812, "step": 230},
    {"epoch": 5.38, "learning_rate": 6.739864864864865e-05, "loss": 1.1772, "step": 231},
    {"epoch": 5.41, "learning_rate": 6.722972972972973e-05, "loss": 1.3066, "step": 232},
    {"epoch": 5.43, "learning_rate": 6.706081081081081e-05, "loss": 1.3026, "step": 233},
    {"epoch": 5.45, "learning_rate": 6.68918918918919e-05, "loss": 1.2601, "step": 234},
    {"epoch": 5.48, "learning_rate": 6.672297297297297e-05, "loss": 1.1287, "step": 235},
    {"epoch": 5.5, "learning_rate": 6.655405405405407e-05, "loss": 1.4575, "step": 236},
    {"epoch": 5.52, "learning_rate": 6.638513513513513e-05, "loss": 1.2495, "step": 237},
    {"epoch": 5.55, "learning_rate": 6.621621621621621e-05, "loss": 1.1763, "step": 238},
    {"epoch": 5.57, "learning_rate": 6.604729729729731e-05, "loss": 1.2768, "step": 239},
    {"epoch": 5.59, "learning_rate": 6.587837837837837e-05, "loss": 1.3268, "step": 240},
    {"epoch": 5.62, "learning_rate": 6.570945945945947e-05, "loss": 1.2082, "step": 241},
    {"epoch": 5.64, "learning_rate": 6.554054054054054e-05, "loss": 1.3312, "step": 242},
    {"epoch": 5.66, "learning_rate": 6.537162162162163e-05, "loss": 1.3582, "step": 243},
    {"epoch": 5.69, "learning_rate": 6.52027027027027e-05, "loss": 1.4009, "step": 244},
    {"epoch": 5.71, "learning_rate": 6.503378378378379e-05, "loss": 1.3129, "step": 245},
    {"epoch": 5.73, "learning_rate": 6.486486486486487e-05, "loss": 1.0997, "step": 246},
    {"epoch": 5.76, "learning_rate": 6.469594594594594e-05, "loss": 1.3952, "step": 247},
    {"epoch": 5.78, "learning_rate": 6.452702702702703e-05, "loss": 1.328, "step": 248},
    {"epoch": 5.8, "learning_rate": 6.43581081081081e-05, "loss": 1.3402, "step": 249},
    {"epoch": 5.83, "learning_rate": 6.41891891891892e-05, "loss": 1.6361, "step": 250},
    {"epoch": 5.85, "learning_rate": 6.402027027027028e-05, "loss": 1.2595, "step": 251},
    {"epoch": 5.87, "learning_rate": 6.385135135135136e-05, "loss": 1.2653, "step": 252},
    {"epoch": 5.9, "learning_rate": 6.368243243243244e-05, "loss": 1.2824, "step": 253},
    {"epoch": 5.92, "learning_rate": 6.351351351351352e-05, "loss": 1.3233, "step": 254},
    {"epoch": 5.94, "learning_rate": 6.33445945945946e-05, "loss": 1.1506, "step": 255},
    {"epoch": 5.97, "learning_rate": 6.317567567567568e-05, "loss": 1.1696, "step": 256},
    {"epoch": 5.99, "learning_rate": 6.300675675675676e-05, "loss": 1.3595, "step": 257},
    {"epoch": 6.01, "learning_rate": 6.283783783783784e-05, "loss": 1.2825, "step": 258},
    {"epoch": 6.03, "learning_rate": 6.266891891891892e-05, "loss": 1.4051, "step": 259},
    {"epoch": 6.06, "learning_rate": 6.25e-05, "loss": 1.2085, "step": 260},
    {"epoch": 6.08, "learning_rate": 6.233108108108108e-05, "loss": 1.2621, "step": 261},
    {"epoch": 6.1, "learning_rate": 6.216216216216216e-05, "loss": 1.293, "step": 262},
    {"epoch": 6.13, "learning_rate": 6.199324324324324e-05, "loss": 1.2971, "step": 263},
    {"epoch": 6.15, "learning_rate": 6.182432432432432e-05, "loss": 1.0583, "step": 264},
    {"epoch": 6.17, "learning_rate": 6.16554054054054e-05, "loss": 1.225, "step": 265},
    {"epoch": 6.2, "learning_rate": 6.14864864864865e-05, "loss": 1.0673, "step": 266},
    {"epoch": 6.22, "learning_rate": 6.131756756756757e-05, "loss": 1.1158, "step": 267},
    {"epoch": 6.24, "learning_rate": 6.114864864864866e-05, "loss": 1.1983, "step": 268},
    {"epoch": 6.27, "learning_rate": 6.097972972972973e-05, "loss": 1.1234, "step": 269},
    {"epoch": 6.29, "learning_rate": 6.0810810810810814e-05, "loss": 1.2147, "step": 270},
    {"epoch": 6.31, "learning_rate": 6.06418918918919e-05, "loss": 1.0961, "step": 271},
    {"epoch": 6.34, "learning_rate": 6.0472972972972976e-05, "loss": 1.2271, "step": 272},
    {"epoch": 6.36, "learning_rate": 6.030405405405406e-05, "loss": 0.8272, "step": 273},
    {"epoch": 6.38, "learning_rate": 6.013513513513514e-05, "loss": 1.0314, "step": 274},
    {"epoch": 6.41, "learning_rate": 5.996621621621622e-05, "loss": 1.172, "step": 275},
    {"epoch": 6.43, "learning_rate": 5.9797297297297305e-05, "loss": 1.1037, "step": 276},
    {"epoch": 6.45, "learning_rate": 5.962837837837838e-05, "loss": 1.0629, "step": 277},
    {"epoch": 6.48, "learning_rate": 5.9459459459459466e-05, "loss": 1.1261, "step": 278},
    {"epoch": 6.5, "learning_rate": 5.929054054054054e-05, "loss": 1.2574, "step": 279},
    {"epoch": 6.52, "learning_rate": 5.912162162162163e-05, "loss": 1.3313, "step": 280},
    {"epoch": 6.55, "learning_rate": 5.89527027027027e-05, "loss": 0.924, "step": 281},
    {"epoch": 6.57, "learning_rate": 5.878378378378379e-05, "loss": 1.1797, "step": 282},
    {"epoch": 6.59, "learning_rate": 5.861486486486487e-05, "loss": 1.1554, "step": 283},
    {"epoch": 6.62, "learning_rate": 5.8445945945945943e-05, "loss": 1.2582, "step": 284},
    {"epoch": 6.64, "learning_rate": 5.827702702702703e-05, "loss": 1.2177, "step": 285},
    {"epoch": 6.66, "learning_rate": 5.8108108108108105e-05, "loss": 1.1448, "step": 286},
    {"epoch": 6.69, "learning_rate": 5.793918918918919e-05, "loss": 1.1025, "step": 287},
    {"epoch": 6.71, "learning_rate": 5.777027027027028e-05, "loss": 1.1173, "step": 288},
    {"epoch": 6.73, "learning_rate": 5.760135135135135e-05, "loss": 1.1789, "step": 289},
    {"epoch": 6.76, "learning_rate": 5.7432432432432434e-05, "loss": 1.3601, "step": 290},
    {"epoch": 6.78, "learning_rate": 5.7263513513513515e-05, "loss": 1.2766, "step": 291},
    {"epoch": 6.8, "learning_rate": 5.7094594594594595e-05, "loss": 1.2541, "step": 292},
    {"epoch": 6.83, "learning_rate": 5.692567567567568e-05, "loss": 1.2084, "step": 293},
    {"epoch": 6.85, "learning_rate": 5.6756756756756757e-05, "loss": 1.3129, "step": 294},
    {"epoch": 6.87, "learning_rate": 5.6587837837837844e-05, "loss": 1.1758, "step": 295},
    {"epoch": 6.9, "learning_rate": 5.641891891891892e-05, "loss": 1.341, "step": 296},
    {"epoch": 6.92, "learning_rate": 5.6250000000000005e-05, "loss": 1.3618, "step": 297},
    {"epoch": 6.94, "learning_rate": 5.6081081081081086e-05, "loss": 1.0368, "step": 298},
    {"epoch": 6.97, "learning_rate": 5.591216216216216e-05, "loss": 1.1718, "step": 299},
    {"epoch": 6.99, "learning_rate": 5.574324324324325e-05, "loss": 1.1364, "step": 300},
    {"epoch": 7.01, "learning_rate": 5.557432432432432e-05, "loss": 1.1075, "step": 301},
    {"epoch": 7.04, "learning_rate": 5.540540540540541e-05, "loss": 1.2325, "step": 302},
    {"epoch": 7.06, "learning_rate": 5.5236486486486496e-05, "loss": 0.8635, "step": 303},
    {"epoch": 7.08, "learning_rate": 5.506756756756757e-05, "loss": 0.7712, "step": 304},
    {"epoch": 7.11, "learning_rate": 5.489864864864866e-05, "loss": 0.9811, "step": 305},
    {"epoch": 7.13, "learning_rate": 5.472972972972973e-05, "loss": 1.1597, "step": 306},
    {"epoch": 7.15, "learning_rate": 5.456081081081081e-05, "loss": 0.9311, "step": 307},
    {"epoch": 7.18, "learning_rate": 5.43918918918919e-05, "loss": 1.1239, "step": 308},
    {"epoch": 7.2, "learning_rate": 5.422297297297297e-05, "loss": 1.2107, "step": 309},
    {"epoch": 7.22, "learning_rate": 5.405405405405406e-05, "loss": 1.2178, "step": 310},
    {"epoch": 7.25, "learning_rate": 5.3885135135135134e-05, "loss": 1.1905, "step": 311},
    {"epoch": 7.27, "learning_rate": 5.371621621621622e-05, "loss": 1.2219, "step": 312},
    {"epoch": 7.29, "learning_rate": 5.35472972972973e-05, "loss": 1.0516, "step": 313},
    {"epoch": 7.32, "learning_rate": 5.337837837837838e-05, "loss": 1.0192, "step": 314},
    {"epoch": 7.34, "learning_rate": 5.3209459459459463e-05, "loss": 0.9752, "step": 315},
    {"epoch": 7.36, "learning_rate": 5.304054054054054e-05, "loss": 0.9826, "step": 316},
    {"epoch": 7.39, "learning_rate": 5.2871621621621625e-05, "loss": 1.3142, "step": 317},
    {"epoch": 7.41, "learning_rate": 5.27027027027027e-05, "loss": 1.1342, "step": 318},
    {"epoch": 7.43, "learning_rate": 5.2533783783783786e-05, "loss": 1.1025, "step": 319},
    {"epoch": 7.46, "learning_rate": 5.2364864864864873e-05, "loss": 1.2068, "step": 320},
    {"epoch": 7.48, "learning_rate": 5.219594594594595e-05, "loss": 1.0683, "step": 321},
    {"epoch": 7.5, "learning_rate": 5.202702702702703e-05, "loss": 0.859, "step": 322},
    {"epoch": 7.53, "learning_rate": 5.185810810810811e-05, "loss": 1.0902, "step": 323},
    {"epoch": 7.55, "learning_rate": 5.168918918918919e-05, "loss": 1.0506, "step": 324},
    {"epoch": 7.57, "learning_rate": 5.152027027027028e-05, "loss": 1.019, "step": 325},
    {"epoch": 7.6, "learning_rate": 5.135135135135135e-05, "loss": 0.6924, "step": 326},
    {"epoch": 7.62, "learning_rate": 5.118243243243244e-05, "loss": 1.1495, "step": 327},
    {"epoch": 7.64, "learning_rate": 5.101351351351351e-05, "loss": 1.0561, "step": 328},
    {"epoch": 7.67, "learning_rate": 5.08445945945946e-05, "loss": 1.1057, "step": 329},
    {"epoch": 7.69, "learning_rate": 5.067567567567568e-05, "loss": 1.0621, "step": 330},
    {"epoch": 7.71, "learning_rate": 5.0506756756756754e-05, "loss": 1.0147, "step": 331},
    {"epoch": 7.74, "learning_rate": 5.033783783783784e-05, "loss": 1.0904, "step": 332},
    {"epoch": 7.76, "learning_rate": 5.0168918918918915e-05, "loss": 1.1281, "step": 333},
    {"epoch": 7.78, "learning_rate": 5e-05, "loss": 1.0421, "step": 334},
    {"epoch": 7.81, "learning_rate": 4.983108108108108e-05, "loss": 1.1218, "step": 335},
    {"epoch": 7.83, "learning_rate": 4.9662162162162164e-05, "loss": 1.0854, "step": 336},
    {"epoch": 7.85, "learning_rate": 4.949324324324325e-05, "loss": 1.1577, "step": 337},
    {"epoch": 7.88, "learning_rate": 4.9324324324324325e-05, "loss": 1.078, "step": 338},
    {"epoch": 7.9, "learning_rate": 4.9155405405405406e-05, "loss": 1.1829, "step": 339},
    {"epoch": 7.92, "learning_rate": 4.8986486486486486e-05, "loss": 1.09, "step": 340},
    {"epoch": 7.95, "learning_rate": 4.881756756756757e-05, "loss": 0.9816, "step": 341},
    {"epoch": 7.97, "learning_rate": 4.8648648648648654e-05, "loss": 0.9653, "step": 342},
    {"epoch": 7.99, "learning_rate": 4.8479729729729735e-05, "loss": 1.1243, "step": 343},
    {"epoch": 8.02, "learning_rate": 4.8310810810810816e-05, "loss": 0.9883, "step": 344},
    {"epoch": 8.04, "learning_rate": 4.8141891891891896e-05, "loss": 0.8322, "step": 345},
    {"epoch": 8.06, "learning_rate": 4.797297297297298e-05, "loss": 0.9729, "step": 346},
    {"epoch": 8.09, "learning_rate": 4.780405405405405e-05, "loss": 1.0465, "step": 347},
    {"epoch": 8.11, "learning_rate": 4.763513513513514e-05, "loss": 0.8629, "step": 348},
    {"epoch": 8.13, "learning_rate": 4.746621621621622e-05, "loss": 1.15, "step": 349},
    {"epoch": 8.16, "learning_rate": 4.72972972972973e-05, "loss": 0.9984, "step": 350},
    {"epoch": 8.18, "learning_rate": 4.712837837837838e-05, "loss": 0.939, "step": 351},
    {"epoch": 8.2, "learning_rate": 4.695945945945946e-05, "loss": 0.9231, "step": 352},
    {"epoch": 8.23, "learning_rate": 4.679054054054055e-05, "loss": 1.1219, "step": 353},
    {"epoch": 8.25, "learning_rate": 4.662162162162162e-05, "loss": 1.1236, "step": 354},
    {"epoch": 8.27, "learning_rate": 4.64527027027027e-05, "loss": 0.9729, "step": 355},
    {"epoch": 8.3, "learning_rate": 4.628378378378378e-05, "loss": 1.0206, "step": 356},
    {"epoch": 8.32, "learning_rate": 4.6114864864864864e-05, "loss": 1.1789, "step": 357},
    {"epoch": 8.34, "learning_rate": 4.594594594594595e-05, "loss": 0.9932, "step": 358},
    {"epoch": 8.37, "learning_rate": 4.577702702702703e-05, "loss": 1.0049, "step": 359},
    {"epoch": 8.39, "learning_rate": 4.560810810810811e-05, "loss": 1.0889, "step": 360},
    {"epoch": 8.41, "learning_rate": 4.543918918918919e-05, "loss": 0.8475, "step": 361},
    {"epoch": 8.43, "learning_rate": 4.5270270270270274e-05, "loss": 1.0319, "step": 362},
    {"epoch": 8.46, "learning_rate": 4.510135135135135e-05, "loss": 0.9336, "step": 363},
    {"epoch": 8.48, "learning_rate": 4.4932432432432435e-05, "loss": 0.9678, "step": 364},
    {"epoch": 8.5, "learning_rate": 4.4763513513513516e-05, "loss": 0.8758, "step": 365},
    {"epoch": 8.53, "learning_rate": 4.4594594594594596e-05, "loss": 0.9342, "step": 366},
    {"epoch": 8.55, "learning_rate": 4.442567567567568e-05, "loss": 1.1054, "step": 367},
    {"epoch": 8.57, "learning_rate": 4.425675675675676e-05, "loss": 0.8595, "step": 368},
    {"epoch": 8.6, "learning_rate": 4.4087837837837845e-05, "loss": 0.9209, "step": 369},
    {"epoch": 8.62, "learning_rate": 4.391891891891892e-05, "loss": 0.9508, "step": 370},
    {"epoch": 8.64, "learning_rate": 4.375e-05, "loss": 0.9705, "step": 371},
    {"epoch": 8.67, "learning_rate": 4.358108108108108e-05, "loss": 0.9421, "step": 372},
    {"epoch": 8.69, "learning_rate": 4.341216216216216e-05, "loss": 1.0525, "step": 373},
    {"epoch": 8.71, "learning_rate": 4.324324324324325e-05, "loss": 0.9324, "step": 374},
    {"epoch": 8.74, "learning_rate": 4.307432432432433e-05, "loss": 0.9463, "step": 375},
    {"epoch": 8.76, "learning_rate": 4.290540540540541e-05, "loss": 0.7919, "step": 376},
    {"epoch": 8.78, "learning_rate": 4.273648648648649e-05, "loss": 1.0147, "step": 377},
    {"epoch": 8.81, "learning_rate": 4.256756756756757e-05, "loss": 0.9019, "step": 378},
    {"epoch": 8.83, "learning_rate": 4.239864864864865e-05, "loss": 0.8662, "step": 379},
    {"epoch": 8.85, "learning_rate": 4.222972972972973e-05, "loss": 0.9442, "step": 380},
    {"epoch": 8.88, "learning_rate": 4.206081081081081e-05, "loss": 1.1238, "step": 381},
    {"epoch": 8.9, "learning_rate": 4.189189189189189e-05, "loss": 1.0176, "step": 382},
    {"epoch": 8.92, "learning_rate": 4.1722972972972974e-05, "loss": 1.1425, "step": 383},
    {"epoch": 8.95, "learning_rate": 4.1554054054054055e-05, "loss": 0.7894, "step": 384},
    {"epoch": 8.97, "learning_rate": 4.138513513513514e-05, "loss": 1.0243, "step": 385},
    {"epoch": 8.99, "learning_rate": 4.1216216216216216e-05, "loss": 1.1354, "step": 386},
    {"epoch": 9.02, "learning_rate": 4.1047297297297297e-05, "loss": 0.8477, "step": 387},
    {"epoch": 9.04, "learning_rate": 4.087837837837838e-05, "loss": 0.9073, "step": 388},
    {"epoch": 9.06, "learning_rate": 4.070945945945946e-05, "loss": 1.0169, "step": 389},
    {"epoch": 9.09, "learning_rate": 4.0540540540540545e-05, "loss": 0.7684, "step": 390},
    {"epoch": 9.11, "learning_rate": 4.0371621621621626e-05, "loss": 1.1133, "step": 391},
    {"epoch": 9.13, "learning_rate": 4.0202702702702707e-05, "loss": 0.9712, "step": 392},
    {"epoch": 9.16, "learning_rate": 4.003378378378379e-05, "loss": 0.9081, "step": 393},
    {"epoch": 9.18, "learning_rate": 3.986486486486487e-05, "loss": 0.788, "step": 394},
    {"epoch": 9.2, "learning_rate": 3.969594594594595e-05, "loss": 1.0385, "step": 395},
    {"epoch": 9.23, "learning_rate": 3.952702702702703e-05, "loss": 1.1285, "step": 396},
    {"epoch": 9.25, "learning_rate": 3.935810810810811e-05, "loss": 0.7035, "step": 397},
    {"epoch": 9.27, "learning_rate": 3.918918918918919e-05, "loss": 0.9234, "step": 398},
    {"epoch": 9.3, "learning_rate": 3.902027027027027e-05, "loss": 1.0529, "step": 399},
    {"epoch": 9.32, "learning_rate": 3.885135135135135e-05, "loss": 0.7203, "step": 400},
    {"epoch": 9.34, "learning_rate": 3.868243243243244e-05, "loss": 0.7964, "step": 401},
    {"epoch": 9.37, "learning_rate": 3.851351351351351e-05, "loss": 0.8433, "step": 402},
    {"epoch": 9.39, "learning_rate": 3.8344594594594594e-05, "loss": 0.9627, "step": 403},
    {"epoch": 9.41, "learning_rate": 3.8175675675675674e-05, "loss": 0.7189, "step": 404},
    {"epoch": 9.44, "learning_rate": 3.8006756756756755e-05, "loss": 0.9945, "step": 405},
    {"epoch": 9.46, "learning_rate": 3.783783783783784e-05, "loss": 0.9255, "step": 406},
    {"epoch": 9.48, "learning_rate": 3.766891891891892e-05, "loss": 0.8539, "step": 407},
    {"epoch": 9.51, "learning_rate": 3.7500000000000003e-05, "loss": 0.8397, "step": 408},
    {"epoch": 9.53, "learning_rate": 3.7331081081081084e-05, "loss": 0.7326, "step": 409},
    {"epoch": 9.55, "learning_rate": 3.7162162162162165e-05, "loss": 0.8779, "step": 410},
    {"epoch": 9.58, "learning_rate": 3.6993243243243245e-05, "loss": 0.9753, "step": 411},
    {"epoch": 9.6, "learning_rate": 3.6824324324324326e-05, "loss": 0.7495, "step": 412},
    {"epoch": 9.62, "learning_rate": 3.665540540540541e-05, "loss": 0.9549, "step": 413},
    {"epoch": 9.65, "learning_rate": 3.648648648648649e-05, "loss": 0.9879, "step": 414},
    {"epoch": 9.67, "learning_rate": 3.631756756756757e-05, "loss": 1.0598, "step": 415},
    {"epoch": 9.69, "learning_rate": 3.6148648648648655e-05, "loss": 0.8778, "step": 416},
    {"epoch": 9.72, "learning_rate": 3.5979729729729736e-05, "loss": 1.0295, "step": 417},
    {"epoch": 9.74, "learning_rate": 3.581081081081081e-05, "loss": 0.7058, "step": 418},
    {"epoch": 9.76, "learning_rate": 3.564189189189189e-05, "loss": 0.6549, "step": 419},
    {"epoch": 9.79, "learning_rate": 3.547297297297297e-05, "loss": 0.8373, "step": 420},
    {"epoch": 9.81, "learning_rate": 3.530405405405405e-05, "loss": 1.014, "step": 421},
    {"epoch": 9.83, "learning_rate": 3.513513513513514e-05, "loss": 0.9325, "step": 422},
    {"epoch": 9.86, "learning_rate": 3.496621621621622e-05, "loss": 1.0468, "step": 423},
    {"epoch": 9.88, "learning_rate": 3.47972972972973e-05, "loss": 0.7643, "step": 424},
    {"epoch": 9.9, "learning_rate": 3.462837837837838e-05, "loss": 0.9248, "step": 425},
    {"epoch": 9.93, "learning_rate": 3.445945945945946e-05, "loss": 0.911, "step": 426},
    {"epoch": 9.95, "learning_rate": 3.429054054054054e-05, "loss": 0.986, "step": 427},
    {"epoch": 9.97, "learning_rate": 3.412162162162162e-05, "loss": 0.8566, "step": 428},
    {"epoch": 10.0, "learning_rate": 3.3952702702702704e-05, "loss": 0.9547, "step": 429},
    {"epoch": 10.02, "learning_rate": 3.3783783783783784e-05, "loss": 0.8804, "step": 430},
    {"epoch": 10.04, "learning_rate": 3.3614864864864865e-05, "loss": 0.9358, "step": 431},
    {"epoch": 10.07, "learning_rate": 3.344594594594595e-05, "loss": 0.7939, "step": 432},
    {"epoch": 10.09, "learning_rate": 3.327702702702703e-05, "loss": 0.8999, "step": 433},
    {"epoch": 10.11, "learning_rate": 3.310810810810811e-05, "loss": 0.6993, "step": 434},
    {"epoch": 10.14, "learning_rate": 3.293918918918919e-05, "loss": 0.8838, "step": 435},
    {"epoch": 10.16, "learning_rate": 3.277027027027027e-05, "loss": 1.0079, "step": 436},
    {"epoch": 10.18, "learning_rate": 3.260135135135135e-05, "loss": 0.7096, "step": 437},
    {"epoch": 10.21, "learning_rate": 3.2432432432432436e-05, "loss": 0.9228, "step": 438},
    {"epoch": 10.23, "learning_rate": 3.226351351351352e-05, "loss": 0.8884, "step": 439},
    {"epoch": 10.25, "learning_rate": 3.20945945945946e-05, "loss": 0.7431, "step": 440},
    {"epoch": 10.28, "learning_rate": 3.192567567567568e-05, "loss": 0.8418, "step": 441},
    {"epoch": 10.3, "learning_rate": 3.175675675675676e-05, "loss": 0.8201, "step": 442},
    {"epoch": 10.32, "learning_rate": 3.158783783783784e-05, "loss": 0.8093, "step": 443},
    {"epoch": 10.35, "learning_rate": 3.141891891891892e-05, "loss": 0.8988, "step": 444},
    {"epoch": 10.37, "learning_rate": 3.125e-05, "loss": 0.7933, "step": 445},
    {"epoch": 10.39, "learning_rate": 3.108108108108108e-05, "loss": 0.8091, "step": 446},
    {"epoch": 10.42, "learning_rate": 3.091216216216216e-05, "loss": 0.7553, "step": 447},
    {"epoch": 10.44, "learning_rate": 3.074324324324325e-05, "loss": 0.6428, "step": 448},
    {"epoch": 10.46, "learning_rate": 3.057432432432433e-05, "loss": 0.9557, "step": 449},
    {"epoch": 10.49, "learning_rate": 3.0405405405405407e-05, "loss": 0.7041, "step": 450},
    {"epoch": 10.51, "learning_rate": 3.0236486486486488e-05, "loss": 0.875, "step": 451},
    {"epoch": 10.53, "learning_rate": 3.006756756756757e-05, "loss": 0.8463, "step": 452},
    {"epoch": 10.56, "learning_rate": 2.9898648648648653e-05, "loss": 0.7783, "step": 453},
    {"epoch": 10.58, "learning_rate": 2.9729729729729733e-05, "loss": 0.7744, "step": 454},
    {"epoch": 10.6, "learning_rate": 2.9560810810810814e-05, "loss": 0.6589, "step": 455},
    {"epoch": 10.63, "learning_rate": 2.9391891891891894e-05, "loss": 0.9363, "step": 456},
    {"epoch": 10.65, "learning_rate": 2.9222972972972972e-05, "loss": 0.7855, "step": 457},
    {"epoch": 10.67, "learning_rate": 2.9054054054054052e-05, "loss": 0.7207, "step": 458},
    {"epoch": 10.7, "learning_rate": 2.888513513513514e-05, "loss": 0.7331, "step": 459},
    {"epoch": 10.72, "learning_rate": 2.8716216216216217e-05, "loss": 0.8589, "step": 460},
    {"epoch": 10.74, "learning_rate": 2.8547297297297298e-05, "loss": 0.8753, "step": 461},
    {"epoch": 10.77, "learning_rate": 2.8378378378378378e-05, "loss": 0.9315, "step": 462},
    {"epoch": 10.79, "learning_rate": 2.820945945945946e-05, "loss": 0.8994, "step": 463},
    {"epoch": 10.81, "learning_rate": 2.8040540540540543e-05, "loss": 0.8125, "step": 464},
    {"epoch": 10.83, "learning_rate": 2.7871621621621624e-05, "loss": 0.9101, "step": 465},
    {"epoch": 10.86, "learning_rate": 2.7702702702702704e-05, "loss": 0.7553, "step": 466},
    {"epoch": 10.88, "learning_rate": 2.7533783783783785e-05, "loss": 0.9303, "step": 467},
    {"epoch": 10.9, "learning_rate": 2.7364864864864865e-05, "loss": 0.9425, "step": 468},
    {"epoch": 10.93, "learning_rate": 2.719594594594595e-05, "loss": 0.7517, "step": 469},
    {"epoch": 10.95, "learning_rate": 2.702702702702703e-05, "loss": 0.8549, "step": 470},
    {"epoch": 10.97, "learning_rate": 2.685810810810811e-05, "loss": 0.8171, "step": 471},
    {"epoch": 11.0, "learning_rate": 2.668918918918919e-05, "loss": 1.0228, "step": 472},
    {"epoch": 11.02, "learning_rate": 2.652027027027027e-05, "loss": 0.5039, "step": 473},
    {"epoch": 11.04, "learning_rate": 2.635135135135135e-05, "loss": 0.6797, "step": 474},
    {"epoch": 11.07, "learning_rate": 2.6182432432432437e-05, "loss": 0.7152, "step": 475},
    {"epoch": 11.09, "learning_rate": 2.6013513513513514e-05, "loss": 0.8649, "step": 476},
    {"epoch": 11.11, "learning_rate": 2.5844594594594595e-05, "loss": 0.737, "step": 477},
    {"epoch": 11.14, "learning_rate": 2.5675675675675675e-05, "loss": 0.8647, "step": 478},
    {"epoch": 11.16, "learning_rate": 2.5506756756756756e-05, "loss": 0.6395, "step": 479},
    {"epoch": 11.18, "learning_rate": 2.533783783783784e-05, "loss": 0.7276, "step": 480},
    {"epoch": 11.21, "learning_rate": 2.516891891891892e-05, "loss": 0.8244, "step": 481},
    {"epoch": 11.23, "learning_rate": 2.5e-05, "loss": 0.9237, "step": 482},
    {"epoch": 11.25, "learning_rate": 2.4831081081081082e-05, "loss": 0.7399, "step": 483},
    {"epoch": 11.28, "learning_rate": 2.4662162162162162e-05, "loss": 1.0337, "step": 484},
    {"epoch": 11.3, "learning_rate": 2.4493243243243243e-05, "loss": 0.6786, "step": 485},
    {"epoch": 11.32, "learning_rate": 2.4324324324324327e-05, "loss": 0.7909, "step": 486},
    {"epoch": 11.35, "learning_rate": 2.4155405405405408e-05, "loss": 0.7042, "step": 487},
    {"epoch": 11.37, "learning_rate": 2.398648648648649e-05, "loss": 0.6577, "step": 488},
    {"epoch": 11.39, "learning_rate": 2.381756756756757e-05, "loss": 0.9994, "step": 489},
    {"epoch": 11.42, "learning_rate": 2.364864864864865e-05, "loss": 0.8041, "step": 490},
    {"epoch": 11.44, "learning_rate": 2.347972972972973e-05, "loss": 0.9023, "step": 491},
    {"epoch": 11.46, "learning_rate": 2.331081081081081e-05, "loss": 0.7826, "step": 492},
    {"epoch": 11.49, "learning_rate": 2.314189189189189e-05, "loss": 0.6315, "step": 493},
    {"epoch": 11.51, "learning_rate": 2.2972972972972976e-05, "loss": 0.7749, "step": 494},
    {"epoch": 11.53, "learning_rate": 2.2804054054054056e-05, "loss": 0.6186, "step": 495},
    {"epoch": 11.56, "learning_rate": 2.2635135135135137e-05, "loss": 0.7855, "step": 496},
    {"epoch": 11.58, "learning_rate": 2.2466216216216218e-05, "loss": 0.8027, "step": 497},
    {"epoch": 11.6, "learning_rate": 2.2297297297297298e-05, "loss": 0.6253, "step": 498},
    {"epoch": 11.63, "learning_rate": 2.212837837837838e-05, "loss": 0.8856, "step": 499},
    {"epoch": 11.65, "learning_rate": 2.195945945945946e-05, "loss": 0.8436, "step": 500},
    {"epoch": 11.67, "learning_rate": 2.179054054054054e-05, "loss": 0.9123, "step": 501},
    {"epoch": 11.7, "learning_rate": 2.1621621621621624e-05, "loss": 0.7693, "step": 502},
    {"epoch": 11.72, "learning_rate": 2.1452702702702705e-05, "loss": 0.7024, "step": 503},
    {"epoch": 11.74, "learning_rate": 2.1283783783783785e-05, "loss": 0.874, "step": 504},
    {"epoch": 11.77, "learning_rate": 2.1114864864864866e-05, "loss": 0.9008, "step": 505},
    {"epoch": 11.79, "learning_rate": 2.0945945945945947e-05, "loss": 0.752, "step": 506},
    {"epoch": 11.81, "learning_rate": 2.0777027027027027e-05, "loss": 0.8127, "step": 507},
    {"epoch": 11.84, "learning_rate": 2.0608108108108108e-05, "loss": 0.6964, "step": 508},
    {"epoch": 11.86, "learning_rate": 2.043918918918919e-05, "loss": 0.8118, "step": 509},
    {"epoch": 11.88, "learning_rate": 2.0270270270270273e-05, "loss": 0.6921, "step": 510},
    {"epoch": 11.91, "learning_rate": 2.0101351351351353e-05, "loss": 0.7776, "step": 511},
    {"epoch": 11.93, "learning_rate": 1.9932432432432434e-05, "loss": 0.6574, "step": 512},
    {"epoch": 11.95, "learning_rate": 1.9763513513513515e-05, "loss": 0.8438, "step": 513},
    {"epoch": 11.98, "learning_rate": 1.9594594594594595e-05, "loss": 0.7435, "step": 514},
    {"epoch": 12.0, "learning_rate": 1.9425675675675676e-05, "loss": 0.7257, "step": 515},
    {"epoch": 12.02, "learning_rate": 1.9256756756756756e-05, "loss": 0.7812, "step": 516},
    {"epoch": 12.05, "learning_rate": 1.9087837837837837e-05, "loss": 0.7275, "step": 517},
    {"epoch": 12.07, "learning_rate": 1.891891891891892e-05, "loss": 0.6814, "step": 518},
    {"epoch": 12.09, "learning_rate": 1.8750000000000002e-05, "loss": 0.826, "step": 519},
    {"epoch": 12.12, "learning_rate": 1.8581081081081082e-05, "loss": 0.6081, "step": 520},
    {"epoch": 12.14, "learning_rate": 1.8412162162162163e-05, "loss": 0.7678, "step": 521},
    {"epoch": 12.16, "learning_rate": 1.8243243243243244e-05, "loss": 0.722, "step": 522},
    {"epoch": 12.19, "learning_rate": 1.8074324324324328e-05, "loss": 0.6993, "step": 523},
    {"epoch": 12.21, "learning_rate": 1.7905405405405405e-05, "loss": 1.012, "step": 524},
    {"epoch": 12.23, "learning_rate": 1.7736486486486486e-05, "loss": 0.7179, "step": 525},
    {"epoch": 12.26, "learning_rate": 1.756756756756757e-05, "loss": 0.7912, "step": 526},
    {"epoch": 12.28, "learning_rate": 1.739864864864865e-05, "loss": 0.7126, "step": 527},
    {"epoch": 12.3, "learning_rate": 1.722972972972973e-05, "loss": 0.6817, "step": 528},
    {"epoch": 12.33, "learning_rate": 1.706081081081081e-05, "loss": 0.5384, "step": 529},
    {"epoch": 12.35, "learning_rate": 1.6891891891891892e-05, "loss": 0.7755, "step": 530},
    {"epoch": 12.37, "learning_rate": 1.6722972972972976e-05, "loss": 0.8104, "step": 531},
    {"epoch": 12.4, "learning_rate": 1.6554054054054053e-05, "loss": 0.6242, "step": 532},
    {"epoch": 12.42, "learning_rate": 1.6385135135135134e-05, "loss": 0.6571, "step": 533},
    {"epoch": 12.44, "learning_rate": 1.6216216216216218e-05, "loss": 0.9234, "step": 534},
    {"epoch": 12.47, "learning_rate": 1.60472972972973e-05, "loss": 0.8154, "step": 535},
    {"epoch": 12.49, "learning_rate": 1.587837837837838e-05, "loss": 0.7264, "step": 536},
    {"epoch": 12.51, "learning_rate": 1.570945945945946e-05, "loss": 0.7055, "step": 537},
    {"epoch": 12.54, "learning_rate": 1.554054054054054e-05, "loss": 0.5859, "step": 538},
    {"epoch": 12.56, "learning_rate": 1.5371621621621625e-05, "loss": 0.7994, "step": 539}
}, |
|
{ |
|
"epoch": 12.58, |
|
"learning_rate": 1.5202702702702704e-05, |
|
"loss": 0.9122, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 1.5033783783783784e-05, |
|
"loss": 0.8031, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 12.63, |
|
"learning_rate": 1.4864864864864867e-05, |
|
"loss": 0.7384, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 12.65, |
|
"learning_rate": 1.4695945945945947e-05, |
|
"loss": 0.6169, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 12.68, |
|
"learning_rate": 1.4527027027027026e-05, |
|
"loss": 0.6522, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 12.7, |
|
"learning_rate": 1.4358108108108108e-05, |
|
"loss": 0.6772, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 12.72, |
|
"learning_rate": 1.4189189189189189e-05, |
|
"loss": 0.7, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 12.75, |
|
"learning_rate": 1.4020270270270271e-05, |
|
"loss": 0.763, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 12.77, |
|
"learning_rate": 1.3851351351351352e-05, |
|
"loss": 0.5662, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 1.3682432432432433e-05, |
|
"loss": 0.8541, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 12.82, |
|
"learning_rate": 1.3513513513513515e-05, |
|
"loss": 0.7007, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 1.3344594594594596e-05, |
|
"loss": 0.683, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 1.3175675675675675e-05, |
|
"loss": 0.6969, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 1.3006756756756757e-05, |
|
"loss": 0.4899, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 1.2837837837837838e-05, |
|
"loss": 0.7863, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 12.93, |
|
"learning_rate": 1.266891891891892e-05, |
|
"loss": 0.753, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 1.25e-05, |
|
"loss": 0.6538, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 12.98, |
|
"learning_rate": 1.2331081081081081e-05, |
|
"loss": 0.7702, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"learning_rate": 1.2162162162162164e-05, |
|
"loss": 0.774, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 13.03, |
|
"learning_rate": 1.1993243243243244e-05, |
|
"loss": 0.9922, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 13.05, |
|
"learning_rate": 1.1824324324324325e-05, |
|
"loss": 0.6902, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 1.1655405405405405e-05, |
|
"loss": 0.6176, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 13.1, |
|
"learning_rate": 1.1486486486486488e-05, |
|
"loss": 0.7269, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 1.1317567567567568e-05, |
|
"loss": 0.6586, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"learning_rate": 1.1148648648648649e-05, |
|
"loss": 0.7033, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 1.097972972972973e-05, |
|
"loss": 0.5245, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 1.0810810810810812e-05, |
|
"loss": 0.5829, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 13.21, |
|
"learning_rate": 1.0641891891891893e-05, |
|
"loss": 0.774, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 13.23, |
|
"learning_rate": 1.0472972972972973e-05, |
|
"loss": 0.8784, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 13.26, |
|
"learning_rate": 1.0304054054054054e-05, |
|
"loss": 0.8832, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 13.28, |
|
"learning_rate": 1.0135135135135136e-05, |
|
"loss": 0.7991, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 13.3, |
|
"learning_rate": 9.966216216216217e-06, |
|
"loss": 0.6586, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 13.33, |
|
"learning_rate": 9.797297297297298e-06, |
|
"loss": 0.7324, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 13.35, |
|
"learning_rate": 9.628378378378378e-06, |
|
"loss": 0.703, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 9.45945945945946e-06, |
|
"loss": 0.6091, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 13.4, |
|
"learning_rate": 9.290540540540541e-06, |
|
"loss": 0.6275, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 9.121621621621622e-06, |
|
"loss": 0.4852, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 8.952702702702702e-06, |
|
"loss": 0.5401, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 8.783783783783785e-06, |
|
"loss": 0.6038, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 8.614864864864865e-06, |
|
"loss": 0.5536, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 13.51, |
|
"learning_rate": 8.445945945945946e-06, |
|
"loss": 0.5804, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 8.277027027027027e-06, |
|
"loss": 0.7556, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 13.56, |
|
"learning_rate": 8.108108108108109e-06, |
|
"loss": 0.621, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 13.58, |
|
"learning_rate": 7.93918918918919e-06, |
|
"loss": 0.6371, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 13.61, |
|
"learning_rate": 7.77027027027027e-06, |
|
"loss": 0.8664, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 13.63, |
|
"learning_rate": 7.601351351351352e-06, |
|
"loss": 0.7345, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 13.65, |
|
"learning_rate": 7.432432432432433e-06, |
|
"loss": 0.7448, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 13.68, |
|
"learning_rate": 7.263513513513513e-06, |
|
"loss": 0.611, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 13.7, |
|
"learning_rate": 7.0945945945945946e-06, |
|
"loss": 0.6497, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 6.925675675675676e-06, |
|
"loss": 0.9, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 13.75, |
|
"learning_rate": 6.7567567567567575e-06, |
|
"loss": 0.5201, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 6.587837837837837e-06, |
|
"loss": 0.6116, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 6.418918918918919e-06, |
|
"loss": 0.754, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 6.25e-06, |
|
"loss": 0.7413, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 6.081081081081082e-06, |
|
"loss": 0.8718, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 13.86, |
|
"learning_rate": 5.912162162162162e-06, |
|
"loss": 0.739, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"learning_rate": 5.743243243243244e-06, |
|
"loss": 0.9099, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 13.91, |
|
"learning_rate": 5.5743243243243245e-06, |
|
"loss": 0.5441, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 13.93, |
|
"learning_rate": 5.405405405405406e-06, |
|
"loss": 0.7775, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 13.96, |
|
"learning_rate": 5.236486486486487e-06, |
|
"loss": 0.7061, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 13.98, |
|
"learning_rate": 5.067567567567568e-06, |
|
"loss": 0.568, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"learning_rate": 4.898648648648649e-06, |
|
"loss": 0.4962, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 14.03, |
|
"learning_rate": 4.72972972972973e-06, |
|
"loss": 0.6171, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 14.05, |
|
"learning_rate": 4.560810810810811e-06, |
|
"loss": 0.6432, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 14.07, |
|
"learning_rate": 4.391891891891892e-06, |
|
"loss": 0.6401, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 14.1, |
|
"learning_rate": 4.222972972972973e-06, |
|
"loss": 0.7806, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 14.12, |
|
"learning_rate": 4.0540540540540545e-06, |
|
"loss": 0.6595, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 14.14, |
|
"learning_rate": 3.885135135135135e-06, |
|
"loss": 0.4667, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 14.17, |
|
"learning_rate": 3.7162162162162166e-06, |
|
"loss": 0.7631, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"learning_rate": 3.5472972972972973e-06, |
|
"loss": 0.6069, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 14.21, |
|
"learning_rate": 3.3783783783783788e-06, |
|
"loss": 0.7084, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 14.24, |
|
"learning_rate": 3.2094594594594594e-06, |
|
"loss": 0.6438, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 14.26, |
|
"learning_rate": 3.040540540540541e-06, |
|
"loss": 0.6691, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 14.28, |
|
"learning_rate": 2.871621621621622e-06, |
|
"loss": 0.5838, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 14.31, |
|
"learning_rate": 2.702702702702703e-06, |
|
"loss": 0.6509, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 14.33, |
|
"learning_rate": 2.533783783783784e-06, |
|
"loss": 0.3435, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 14.35, |
|
"learning_rate": 2.364864864864865e-06, |
|
"loss": 0.7366, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 14.38, |
|
"learning_rate": 2.195945945945946e-06, |
|
"loss": 0.654, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 14.4, |
|
"learning_rate": 2.0270270270270273e-06, |
|
"loss": 0.7386, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 14.42, |
|
"learning_rate": 1.8581081081081083e-06, |
|
"loss": 0.5989, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 14.45, |
|
"learning_rate": 1.6891891891891894e-06, |
|
"loss": 0.888, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 14.47, |
|
"learning_rate": 1.5202702702702704e-06, |
|
"loss": 0.6642, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 14.49, |
|
"learning_rate": 1.3513513513513515e-06, |
|
"loss": 0.7903, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 14.52, |
|
"learning_rate": 1.1824324324324326e-06, |
|
"loss": 0.5658, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 14.54, |
|
"learning_rate": 1.0135135135135136e-06, |
|
"loss": 0.7657, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 14.56, |
|
"learning_rate": 8.445945945945947e-07, |
|
"loss": 0.6699, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 14.59, |
|
"learning_rate": 6.756756756756758e-07, |
|
"loss": 0.6567, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 5.067567567567568e-07, |
|
"loss": 0.8468, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 14.63, |
|
"learning_rate": 3.378378378378379e-07, |
|
"loss": 0.7909, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 14.66, |
|
"learning_rate": 1.6891891891891894e-07, |
|
"loss": 0.6607, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"learning_rate": 0.0, |
|
"loss": 0.7368, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 14.68, |
|
"step": 630, |
|
"total_flos": 2.2792813421533594e+17, |
|
"train_loss": 1.167946250949587, |
|
"train_runtime": 20172.5226, |
|
"train_samples_per_second": 0.766, |
|
"train_steps_per_second": 0.031 |
|
} |
|
], |
|
"max_steps": 630, |
|
"num_train_epochs": 15, |
|
"total_flos": 2.2792813421533594e+17, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|