{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.8530670470756063,
  "eval_steps": 100,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5e-05, "loss": 1.8229, "step": 1 },
    { "epoch": 0.0, "learning_rate": 4.9999950203346446e-05, "loss": 1.8352, "step": 2 },
    { "epoch": 0.01, "learning_rate": 4.999980081358417e-05, "loss": 1.742, "step": 3 },
    { "epoch": 0.01, "learning_rate": 4.999955183130829e-05, "loss": 1.6315, "step": 4 },
    { "epoch": 0.01, "learning_rate": 4.999920325751068e-05, "loss": 1.6204, "step": 5 },
    { "epoch": 0.01, "learning_rate": 4.999875509357998e-05, "loss": 1.5485, "step": 6 },
    { "epoch": 0.01, "learning_rate": 4.999820734130155e-05, "loss": 1.4713, "step": 7 },
    { "epoch": 0.02, "learning_rate": 4.999756000285748e-05, "loss": 1.4675, "step": 8 },
    { "epoch": 0.02, "learning_rate": 4.9996813080826606e-05, "loss": 1.5091, "step": 9 },
    { "epoch": 0.02, "learning_rate": 4.999596657818445e-05, "loss": 1.4009, "step": 10 },
    { "epoch": 0.02, "learning_rate": 4.9995020498303275e-05, "loss": 1.2992, "step": 11 },
    { "epoch": 0.02, "learning_rate": 4.999397484495198e-05, "loss": 1.2111, "step": 12 },
    { "epoch": 0.02, "learning_rate": 4.99928296222962e-05, "loss": 1.2281, "step": 13 },
    { "epoch": 0.03, "learning_rate": 4.999158483489818e-05, "loss": 1.2099, "step": 14 },
    { "epoch": 0.03, "learning_rate": 4.99902404877168e-05, "loss": 1.1259, "step": 15 },
    { "epoch": 0.03, "learning_rate": 4.998879658610761e-05, "loss": 1.022, "step": 16 },
    { "epoch": 0.03, "learning_rate": 4.998725313582272e-05, "loss": 1.1276, "step": 17 },
    { "epoch": 0.03, "learning_rate": 4.998561014301081e-05, "loss": 1.1276, "step": 18 },
    { "epoch": 0.04, "learning_rate": 4.998386761421714e-05, "loss": 1.2404, "step": 19 },
    { "epoch": 0.04, "learning_rate": 4.998202555638346e-05, "loss": 1.1622, "step": 20 },
    { "epoch": 0.04, "learning_rate": 4.998008397684806e-05, "loss": 1.1335, "step": 21 },
    { "epoch": 0.04, "learning_rate": 4.997804288334565e-05, "loss": 1.2187, "step": 22 },
    { "epoch": 0.04, "learning_rate": 4.9975902284007405e-05, "loss": 1.0678, "step": 23 },
    { "epoch": 0.05, "learning_rate": 4.997366218736091e-05, "loss": 1.0195, "step": 24 },
    { "epoch": 0.05, "learning_rate": 4.99713226023301e-05, "loss": 1.0138, "step": 25 },
    { "epoch": 0.05, "learning_rate": 4.9968883538235264e-05, "loss": 1.0706, "step": 26 },
    { "epoch": 0.05, "learning_rate": 4.996634500479297e-05, "loss": 0.9617, "step": 27 },
    { "epoch": 0.05, "learning_rate": 4.996370701211607e-05, "loss": 1.034, "step": 28 },
    { "epoch": 0.06, "learning_rate": 4.996096957071361e-05, "loss": 0.9952, "step": 29 },
    { "epoch": 0.06, "learning_rate": 4.9958132691490816e-05, "loss": 0.9598, "step": 30 },
    { "epoch": 0.06, "learning_rate": 4.995519638574907e-05, "loss": 1.0342, "step": 31 },
    { "epoch": 0.06, "learning_rate": 4.9952160665185825e-05, "loss": 0.9147, "step": 32 },
    { "epoch": 0.06, "learning_rate": 4.994902554189458e-05, "loss": 0.924, "step": 33 },
    { "epoch": 0.06, "learning_rate": 4.994579102836482e-05, "loss": 0.9021, "step": 34 },
    { "epoch": 0.07, "learning_rate": 4.994245713748198e-05, "loss": 1.0732, "step": 35 },
    { "epoch": 0.07, "learning_rate": 4.993902388252739e-05, "loss": 0.926, "step": 36 },
    { "epoch": 0.07, "learning_rate": 4.9935491277178236e-05, "loss": 0.9082, "step": 37 },
    { "epoch": 0.07, "learning_rate": 4.993185933550745e-05, "loss": 0.8735, "step": 38 },
    { "epoch": 0.07, "learning_rate": 4.992812807198372e-05, "loss": 1.0212, "step": 39 },
    { "epoch": 0.08, "learning_rate": 4.9924297501471414e-05, "loss": 0.9495, "step": 40 },
    { "epoch": 0.08, "learning_rate": 4.9920367639230483e-05, "loss": 0.9101, "step": 41 },
    { "epoch": 0.08, "learning_rate": 4.991633850091645e-05, "loss": 1.0533, "step": 42 },
    { "epoch": 0.08, "learning_rate": 4.991221010258034e-05, "loss": 0.8759, "step": 43 },
    { "epoch": 0.08, "learning_rate": 4.990798246066856e-05, "loss": 0.969, "step": 44 },
    { "epoch": 0.09, "learning_rate": 4.9903655592022916e-05, "loss": 0.8463, "step": 45 },
    { "epoch": 0.09, "learning_rate": 4.9899229513880494e-05, "loss": 0.844, "step": 46 },
    { "epoch": 0.09, "learning_rate": 4.989470424387361e-05, "loss": 0.8386, "step": 47 },
    { "epoch": 0.09, "learning_rate": 4.989007980002973e-05, "loss": 0.93, "step": 48 },
    { "epoch": 0.09, "learning_rate": 4.988535620077138e-05, "loss": 0.8551, "step": 49 },
    { "epoch": 0.1, "learning_rate": 4.988053346491613e-05, "loss": 0.8633, "step": 50 },
    { "epoch": 0.1, "learning_rate": 4.987561161167648e-05, "loss": 0.9056, "step": 51 },
    { "epoch": 0.1, "learning_rate": 4.9870590660659755e-05, "loss": 0.9366, "step": 52 },
    { "epoch": 0.1, "learning_rate": 4.986547063186808e-05, "loss": 1.0469, "step": 53 },
    { "epoch": 0.1, "learning_rate": 4.98602515456983e-05, "loss": 0.8723, "step": 54 },
    { "epoch": 0.1, "learning_rate": 4.985493342294184e-05, "loss": 0.9484, "step": 55 },
    { "epoch": 0.11, "learning_rate": 4.9849516284784676e-05, "loss": 0.8346, "step": 56 },
    { "epoch": 0.11, "learning_rate": 4.984400015280724e-05, "loss": 0.789, "step": 57 },
    { "epoch": 0.11, "learning_rate": 4.983838504898433e-05, "loss": 0.8986, "step": 58 },
    { "epoch": 0.11, "learning_rate": 4.9832670995685006e-05, "loss": 0.8799, "step": 59 },
    { "epoch": 0.11, "learning_rate": 4.9826858015672536e-05, "loss": 0.9028, "step": 60 },
    { "epoch": 0.12, "learning_rate": 4.982094613210428e-05, "loss": 0.8908, "step": 61 },
    { "epoch": 0.12, "learning_rate": 4.9814935368531586e-05, "loss": 0.8273, "step": 62 },
    { "epoch": 0.12, "learning_rate": 4.980882574889973e-05, "loss": 0.784, "step": 63 },
    { "epoch": 0.12, "learning_rate": 4.980261729754781e-05, "loss": 0.9336, "step": 64 },
    { "epoch": 0.12, "learning_rate": 4.9796310039208623e-05, "loss": 0.8943, "step": 65 },
    { "epoch": 0.13, "learning_rate": 4.978990399900861e-05, "loss": 1.1, "step": 66 },
    { "epoch": 0.13, "learning_rate": 4.9783399202467714e-05, "loss": 0.877, "step": 67 },
    { "epoch": 0.13, "learning_rate": 4.9776795675499296e-05, "loss": 0.8291, "step": 68 },
    { "epoch": 0.13, "learning_rate": 4.9770093444410046e-05, "loss": 0.7997, "step": 69 },
    { "epoch": 0.13, "learning_rate": 4.9763292535899866e-05, "loss": 0.7911, "step": 70 },
    { "epoch": 0.14, "learning_rate": 4.9756392977061736e-05, "loss": 0.8996, "step": 71 },
    { "epoch": 0.14, "learning_rate": 4.974939479538166e-05, "loss": 0.8631, "step": 72 },
    { "epoch": 0.14, "learning_rate": 4.974229801873854e-05, "loss": 0.8303, "step": 73 },
    { "epoch": 0.14, "learning_rate": 4.9735102675404004e-05, "loss": 0.8708, "step": 74 },
    { "epoch": 0.14, "learning_rate": 4.972780879404239e-05, "loss": 0.998, "step": 75 },
    { "epoch": 0.14, "learning_rate": 4.972041640371056e-05, "loss": 1.1049, "step": 76 },
    { "epoch": 0.15, "learning_rate": 4.971292553385783e-05, "loss": 0.774, "step": 77 },
    { "epoch": 0.15, "learning_rate": 4.970533621432581e-05, "loss": 0.8142, "step": 78 },
    { "epoch": 0.15, "learning_rate": 4.969764847534832e-05, "loss": 0.9968, "step": 79 },
    { "epoch": 0.15, "learning_rate": 4.9689862347551255e-05, "loss": 0.9622, "step": 80 },
    { "epoch": 0.15, "learning_rate": 4.9681977861952464e-05, "loss": 0.8342, "step": 81 },
    { "epoch": 0.16, "learning_rate": 4.967399504996162e-05, "loss": 0.9, "step": 82 },
    { "epoch": 0.16, "learning_rate": 4.966591394338012e-05, "loss": 0.8515, "step": 83 },
    { "epoch": 0.16, "learning_rate": 4.965773457440092e-05, "loss": 0.7937, "step": 84 },
    { "epoch": 0.16, "learning_rate": 4.964945697560844e-05, "loss": 0.9577, "step": 85 },
    { "epoch": 0.16, "learning_rate": 4.9641081179978424e-05, "loss": 0.8233, "step": 86 },
    { "epoch": 0.17, "learning_rate": 4.963260722087779e-05, "loss": 0.9769, "step": 87 },
    { "epoch": 0.17, "learning_rate": 4.9624035132064526e-05, "loss": 0.8927, "step": 88 },
    { "epoch": 0.17, "learning_rate": 4.961536494768754e-05, "loss": 0.9383, "step": 89 },
    { "epoch": 0.17, "learning_rate": 4.960659670228652e-05, "loss": 0.9177, "step": 90 },
    { "epoch": 0.17, "learning_rate": 4.959773043079181e-05, "loss": 0.8026, "step": 91 },
    { "epoch": 0.17, "learning_rate": 4.958876616852427e-05, "loss": 0.9647, "step": 92 },
    { "epoch": 0.18, "learning_rate": 4.9579703951195113e-05, "loss": 0.7545, "step": 93 },
    { "epoch": 0.18, "learning_rate": 4.957054381490579e-05, "loss": 0.7836, "step": 94 },
    { "epoch": 0.18, "learning_rate": 4.9561285796147824e-05, "loss": 0.8803, "step": 95 },
    { "epoch": 0.18, "learning_rate": 4.955192993180269e-05, "loss": 0.8988, "step": 96 },
    { "epoch": 0.18, "learning_rate": 4.9542476259141656e-05, "loss": 0.8418, "step": 97 },
    { "epoch": 0.19, "learning_rate": 4.953292481582561e-05, "loss": 0.9004, "step": 98 },
    { "epoch": 0.19, "learning_rate": 4.9523275639904944e-05, "loss": 0.9159, "step": 99 },
    { "epoch": 0.19, "learning_rate": 4.951352876981939e-05, "loss": 0.7605, "step": 100 },
    { "epoch": 0.19, "eval_loss": 0.7751120924949646, "eval_runtime": 2.7398, "eval_samples_per_second": 3.285, "eval_steps_per_second": 0.73, "step": 100 },
    { "epoch": 0.19, "learning_rate": 4.9503684244397894e-05, "loss": 0.8228, "step": 101 },
    { "epoch": 0.19, "learning_rate": 4.949374210285838e-05, "loss": 0.7176, "step": 102 },
    { "epoch": 0.2, "learning_rate": 4.948370238480769e-05, "loss": 0.9403, "step": 103 },
    { "epoch": 0.2, "learning_rate": 4.9473565130241376e-05, "loss": 0.7711, "step": 104 },
    { "epoch": 0.2, "learning_rate": 4.946333037954354e-05, "loss": 0.8349, "step": 105 },
    { "epoch": 0.2, "learning_rate": 4.945299817348669e-05, "loss": 0.7899, "step": 106 },
    { "epoch": 0.2, "learning_rate": 4.944256855323157e-05, "loss": 0.913, "step": 107 },
    { "epoch": 0.21, "learning_rate": 4.9432041560326995e-05, "loss": 0.9574, "step": 108 },
    { "epoch": 0.21, "learning_rate": 4.942141723670969e-05, "loss": 0.8848, "step": 109 },
    { "epoch": 0.21, "learning_rate": 4.941069562470411e-05, "loss": 0.9267, "step": 110 },
    { "epoch": 0.21, "learning_rate": 4.939987676702229e-05, "loss": 0.7779, "step": 111 },
    { "epoch": 0.21, "learning_rate": 4.938896070676366e-05, "loss": 0.9766, "step": 112 },
    { "epoch": 0.21, "learning_rate": 4.9377947487414886e-05, "loss": 0.8737, "step": 113 },
    { "epoch": 0.22, "learning_rate": 4.936683715284968e-05, "loss": 0.7962, "step": 114 },
    { "epoch": 0.22, "learning_rate": 4.935562974732865e-05, "loss": 0.8711, "step": 115 },
    { "epoch": 0.22, "learning_rate": 4.934432531549909e-05, "loss": 0.8212, "step": 116 },
    { "epoch": 0.22, "learning_rate": 4.933292390239483e-05, "loss": 0.8364, "step": 117 },
    { "epoch": 0.22, "learning_rate": 4.9321425553436054e-05, "loss": 0.8737, "step": 118 },
    { "epoch": 0.23, "learning_rate": 4.9309830314429095e-05, "loss": 0.8382, "step": 119 },
    { "epoch": 0.23, "learning_rate": 4.92981382315663e-05, "loss": 0.9143, "step": 120 },
    { "epoch": 0.23, "learning_rate": 4.9286349351425774e-05, "loss": 0.9366, "step": 121 },
    { "epoch": 0.23, "learning_rate": 4.927446372097127e-05, "loss": 0.7771, "step": 122 },
    { "epoch": 0.23, "learning_rate": 4.9262481387551976e-05, "loss": 0.8218, "step": 123 },
    { "epoch": 0.24, "learning_rate": 4.925040239890227e-05, "loss": 0.9309, "step": 124 },
    { "epoch": 0.24, "learning_rate": 4.923822680314162e-05, "loss": 0.8345, "step": 125 },
    { "epoch": 0.24, "learning_rate": 4.922595464877435e-05, "loss": 0.8479, "step": 126 },
    { "epoch": 0.24, "learning_rate": 4.921358598468942e-05, "loss": 0.8879, "step": 127 },
    { "epoch": 0.24, "learning_rate": 4.920112086016029e-05, "loss": 0.8235, "step": 128 },
    { "epoch": 0.25, "learning_rate": 4.918855932484468e-05, "loss": 0.8693, "step": 129 },
    { "epoch": 0.25, "learning_rate": 4.917590142878437e-05, "loss": 0.8928, "step": 130 },
    { "epoch": 0.25, "learning_rate": 4.916314722240504e-05, "loss": 0.9152, "step": 131 },
    { "epoch": 0.25, "learning_rate": 4.915029675651604e-05, "loss": 0.9386, "step": 132 },
    { "epoch": 0.25, "learning_rate": 4.913735008231016e-05, "loss": 0.8565, "step": 133 },
    { "epoch": 0.25, "learning_rate": 4.9124307251363525e-05, "loss": 0.8073, "step": 134 },
    { "epoch": 0.26, "learning_rate": 4.911116831563524e-05, "loss": 0.8975, "step": 135 },
    { "epoch": 0.26, "learning_rate": 4.909793332746733e-05, "loss": 0.7949, "step": 136 },
    { "epoch": 0.26, "learning_rate": 4.908460233958444e-05, "loss": 0.8059, "step": 137 },
    { "epoch": 0.26, "learning_rate": 4.907117540509366e-05, "loss": 0.8424, "step": 138 },
    { "epoch": 0.26, "learning_rate": 4.9057652577484293e-05, "loss": 0.7911, "step": 139 },
    { "epoch": 0.27, "learning_rate": 4.9044033910627676e-05, "loss": 0.858, "step": 140 },
    { "epoch": 0.27, "learning_rate": 4.9030319458776926e-05, "loss": 0.977, "step": 141 },
    { "epoch": 0.27, "learning_rate": 4.9016509276566746e-05, "loss": 0.8803, "step": 142 },
    { "epoch": 0.27, "learning_rate": 4.9002603419013205e-05, "loss": 0.9155, "step": 143 },
    { "epoch": 0.27, "learning_rate": 4.8988601941513526e-05, "loss": 0.6968, "step": 144 },
    { "epoch": 0.28, "learning_rate": 4.897450489984583e-05, "loss": 0.8425, "step": 145 },
    { "epoch": 0.28, "learning_rate": 4.896031235016897e-05, "loss": 0.9646, "step": 146 },
    { "epoch": 0.28, "learning_rate": 4.894602434902226e-05, "loss": 0.8387, "step": 147 },
    { "epoch": 0.28, "learning_rate": 4.893164095332527e-05, "loss": 0.9245, "step": 148 },
    { "epoch": 0.28, "learning_rate": 4.89171622203776e-05, "loss": 0.861, "step": 149 },
    { "epoch": 0.29, "learning_rate": 4.890258820785865e-05, "loss": 0.7925, "step": 150 },
    { "epoch": 0.29, "learning_rate": 4.8887918973827375e-05, "loss": 0.8197, "step": 151 },
    { "epoch": 0.29, "learning_rate": 4.887315457672208e-05, "loss": 0.8404, "step": 152 },
    { "epoch": 0.29, "learning_rate": 4.8858295075360175e-05, "loss": 0.8094, "step": 153 },
    { "epoch": 0.29, "learning_rate": 4.884334052893793e-05, "loss": 0.8282, "step": 154 },
    { "epoch": 0.29, "learning_rate": 4.882829099703026e-05, "loss": 0.7838, "step": 155 },
    { "epoch": 0.3, "learning_rate": 4.881314653959046e-05, "loss": 0.8562, "step": 156 },
    { "epoch": 0.3, "learning_rate": 4.8797907216950015e-05, "loss": 0.818, "step": 157 },
    { "epoch": 0.3, "learning_rate": 4.878257308981827e-05, "loss": 0.7874, "step": 158 },
    { "epoch": 0.3, "learning_rate": 4.8767144219282325e-05, "loss": 0.9106, "step": 159 },
    { "epoch": 0.3, "learning_rate": 4.875162066680664e-05, "loss": 0.9026, "step": 160 },
    { "epoch": 0.31, "learning_rate": 4.8736002494232904e-05, "loss": 0.7882, "step": 161 },
    { "epoch": 0.31, "learning_rate": 4.872028976377974e-05, "loss": 1.0285, "step": 162 },
    { "epoch": 0.31, "learning_rate": 4.870448253804244e-05, "loss": 0.8034, "step": 163 },
    { "epoch": 0.31, "learning_rate": 4.868858087999279e-05, "loss": 0.7977, "step": 164 },
    { "epoch": 0.31, "learning_rate": 4.86725848529787e-05, "loss": 0.885, "step": 165 },
    { "epoch": 0.32, "learning_rate": 4.86564945207241e-05, "loss": 0.804, "step": 166 },
    { "epoch": 0.32, "learning_rate": 4.864030994732852e-05, "loss": 0.9618, "step": 167 },
    { "epoch": 0.32, "learning_rate": 4.862403119726702e-05, "loss": 0.8042, "step": 168 },
    { "epoch": 0.32, "learning_rate": 4.860765833538974e-05, "loss": 1.0271, "step": 169 },
    { "epoch": 0.32, "learning_rate": 4.85911914269218e-05, "loss": 0.799, "step": 170 },
    { "epoch": 0.33, "learning_rate": 4.8574630537462954e-05, "loss": 0.7454, "step": 171 },
    { "epoch": 0.33, "learning_rate": 4.855797573298734e-05, "loss": 0.7428, "step": 172 },
    { "epoch": 0.33, "learning_rate": 4.8541227079843246e-05, "loss": 0.9079, "step": 173 },
    { "epoch": 0.33, "learning_rate": 4.852438464475283e-05, "loss": 0.7964, "step": 174 },
    { "epoch": 0.33, "learning_rate": 4.8507448494811834e-05, "loss": 0.9262, "step": 175 },
    { "epoch": 0.33, "learning_rate": 4.849041869748935e-05, "loss": 0.7817, "step": 176 },
    { "epoch": 0.34, "learning_rate": 4.847329532062753e-05, "loss": 0.759, "step": 177 },
    { "epoch": 0.34, "learning_rate": 4.845607843244132e-05, "loss": 0.9029, "step": 178 },
    { "epoch": 0.34, "learning_rate": 4.843876810151821e-05, "loss": 0.7191, "step": 179 },
    { "epoch": 0.34, "learning_rate": 4.8421364396817905e-05, "loss": 0.8861, "step": 180 },
    { "epoch": 0.34, "learning_rate": 4.840386738767211e-05, "loss": 0.982, "step": 181 },
    { "epoch": 0.35, "learning_rate": 4.838627714378423e-05, "loss": 0.8453, "step": 182 },
    { "epoch": 0.35, "learning_rate": 4.8368593735229074e-05, "loss": 0.9402, "step": 183 },
    { "epoch": 0.35, "learning_rate": 4.8350817232452625e-05, "loss": 0.7882, "step": 184 },
    { "epoch": 0.35, "learning_rate": 4.8332947706271706e-05, "loss": 0.8458, "step": 185 },
    { "epoch": 0.35, "learning_rate": 4.831498522787372e-05, "loss": 0.8955, "step": 186 },
    { "epoch": 0.36, "learning_rate": 4.829692986881638e-05, "loss": 0.7777, "step": 187 },
    { "epoch": 0.36, "learning_rate": 4.8278781701027395e-05, "loss": 0.7341, "step": 188 },
    { "epoch": 0.36, "learning_rate": 4.826054079680422e-05, "loss": 0.8234, "step": 189 },
    { "epoch": 0.36, "learning_rate": 4.824220722881371e-05, "loss": 0.7693, "step": 190 },
    { "epoch": 0.36, "learning_rate": 4.8223781070091914e-05, "loss": 0.8162, "step": 191 },
    { "epoch": 0.37, "learning_rate": 4.8205262394043705e-05, "loss": 0.8983, "step": 192 },
    { "epoch": 0.37, "learning_rate": 4.818665127444253e-05, "loss": 0.9481, "step": 193 },
    { "epoch": 0.37, "learning_rate": 4.816794778543011e-05, "loss": 0.9685, "step": 194 },
    { "epoch": 0.37, "learning_rate": 4.814915200151614e-05, "loss": 0.9092, "step": 195 },
    { "epoch": 0.37, "learning_rate": 4.813026399757799e-05, "loss": 0.7715, "step": 196 },
    { "epoch": 0.37, "learning_rate": 4.811128384886041e-05, "loss": 0.8279, "step": 197 },
    { "epoch": 0.38, "learning_rate": 4.809221163097523e-05, "loss": 0.7468, "step": 198 },
    { "epoch": 0.38, "learning_rate": 4.807304741990106e-05, "loss": 0.7938, "step": 199 },
    { "epoch": 0.38, "learning_rate": 4.805379129198299e-05, "loss": 0.9049, "step": 200 },
    { "epoch": 0.38, "eval_loss": 0.7435702681541443, "eval_runtime": 2.8319, "eval_samples_per_second": 3.178, "eval_steps_per_second": 0.706, "step": 200 },
    { "epoch": 0.38, "learning_rate": 4.803444332393227e-05, "loss": 0.8812, "step": 201 },
    { "epoch": 0.38, "learning_rate": 4.801500359282605e-05, "loss": 0.84, "step": 202 },
    { "epoch": 0.39, "learning_rate": 4.7995472176106974e-05, "loss": 0.8609, "step": 203 },
    { "epoch": 0.39, "learning_rate": 4.797584915158301e-05, "loss": 0.8207, "step": 204 },
    { "epoch": 0.39, "learning_rate": 4.795613459742702e-05, "loss": 0.7714, "step": 205 },
    { "epoch": 0.39, "learning_rate": 4.7936328592176516e-05, "loss": 0.7537, "step": 206 },
    { "epoch": 0.39, "learning_rate": 4.7916431214733314e-05, "loss": 0.8651, "step": 207 },
    { "epoch": 0.4, "learning_rate": 4.7896442544363246e-05, "loss": 0.8055, "step": 208 },
    { "epoch": 0.4, "learning_rate": 4.787636266069582e-05, "loss": 0.8936, "step": 209 },
    { "epoch": 0.4, "learning_rate": 4.785619164372391e-05, "loss": 0.701, "step": 210 },
    { "epoch": 0.4, "learning_rate": 4.783592957380346e-05, "loss": 0.7145, "step": 211 },
    { "epoch": 0.4, "learning_rate": 4.781557653165313e-05, "loss": 0.896, "step": 212 },
    { "epoch": 0.41, "learning_rate": 4.779513259835399e-05, "loss": 0.7178, "step": 213 },
    { "epoch": 0.41, "learning_rate": 4.777459785534918e-05, "loss": 0.8032, "step": 214 },
    { "epoch": 0.41, "learning_rate": 4.7753972384443644e-05, "loss": 0.8792, "step": 215 },
    { "epoch": 0.41, "learning_rate": 4.773325626780373e-05, "loss": 0.7246, "step": 216 },
    { "epoch": 0.41, "learning_rate": 4.771244958795688e-05, "loss": 0.8574, "step": 217 },
    { "epoch": 0.41, "learning_rate": 4.7691552427791365e-05, "loss": 0.7291, "step": 218 },
    { "epoch": 0.42, "learning_rate": 4.7670564870555864e-05, "loss": 0.8259, "step": 219 },
    { "epoch": 0.42, "learning_rate": 4.7649486999859185e-05, "loss": 0.7818, "step": 220 },
    { "epoch": 0.42, "learning_rate": 4.762831889966992e-05, "loss": 0.8693, "step": 221 },
    { "epoch": 0.42, "learning_rate": 4.760706065431612e-05, "loss": 0.8281, "step": 222 },
    { "epoch": 0.42, "learning_rate": 4.758571234848494e-05, "loss": 0.8359, "step": 223 },
    { "epoch": 0.43, "learning_rate": 4.7564274067222314e-05, "loss": 0.8119, "step": 224 },
    { "epoch": 0.43, "learning_rate": 4.754274589593262e-05, "loss": 0.8577, "step": 225 },
    { "epoch": 0.43, "learning_rate": 4.752112792037832e-05, "loss": 0.7509, "step": 226 },
    { "epoch": 0.43, "learning_rate": 4.7499420226679646e-05, "loss": 0.7648, "step": 227 },
    { "epoch": 0.43, "learning_rate": 4.747762290131424e-05, "loss": 0.7964, "step": 228 },
    { "epoch": 0.44, "learning_rate": 4.7455736031116804e-05, "loss": 0.8229, "step": 229 },
    { "epoch": 0.44, "learning_rate": 4.7433759703278785e-05, "loss": 0.7705, "step": 230 },
    { "epoch": 0.44, "learning_rate": 4.741169400534797e-05, "loss": 0.8954, "step": 231 },
    { "epoch": 0.44, "learning_rate": 4.7389539025228217e-05, "loss": 0.8405, "step": 232 },
    { "epoch": 0.44, "learning_rate": 4.7367294851179e-05, "loss": 0.7539, "step": 233 },
    { "epoch": 0.45, "learning_rate": 4.734496157181518e-05, "loss": 0.7844, "step": 234 },
    { "epoch": 0.45, "learning_rate": 4.732253927610657e-05, "loss": 0.7417, "step": 235 },
    { "epoch": 0.45, "learning_rate": 4.7300028053377564e-05, "loss": 0.8239, "step": 236 },
    { "epoch": 0.45, "learning_rate": 4.7277427993306867e-05, "loss": 0.7526, "step": 237 },
    { "epoch": 0.45, "learning_rate": 4.7254739185927066e-05, "loss": 0.8431, "step": 238 },
    { "epoch": 0.45, "learning_rate": 4.7231961721624296e-05, "loss": 0.737, "step": 239 },
    { "epoch": 0.46, "learning_rate": 4.720909569113787e-05, "loss": 0.751, "step": 240 },
    { "epoch": 0.46, "learning_rate": 4.718614118555993e-05, "loss": 0.8756, "step": 241 },
    { "epoch": 0.46, "learning_rate": 4.71630982963351e-05, "loss": 0.7679, "step": 242 },
    { "epoch": 0.46, "learning_rate": 4.713996711526005e-05, "loss": 0.7593, "step": 243 },
    { "epoch": 0.46, "learning_rate": 4.7116747734483245e-05, "loss": 0.8194, "step": 244 },
    { "epoch": 0.47, "learning_rate": 4.7093440246504465e-05, "loss": 0.9187, "step": 245 },
    { "epoch": 0.47, "learning_rate": 4.70700447441745e-05, "loss": 0.7543, "step": 246 },
    { "epoch": 0.47, "learning_rate": 4.704656132069478e-05, "loss": 0.6961, "step": 247 },
    { "epoch": 0.47, "learning_rate": 4.7022990069616973e-05, "loss": 0.8313, "step": 248 },
    { "epoch": 0.47, "learning_rate": 4.699933108484262e-05, "loss": 0.8524, "step": 249 },
    { "epoch": 0.48, "learning_rate": 4.69755844606228e-05, "loss": 0.7614, "step": 250 },
    { "epoch": 0.48, "learning_rate": 4.695175029155771e-05, "loss": 0.7678, "step": 251 },
    { "epoch": 0.48, "learning_rate": 4.692782867259627e-05, "loss": 0.9097, "step": 252 },
    { "epoch": 0.48, "learning_rate": 4.690381969903583e-05, "loss": 0.8819, "step": 253 },
    { "epoch": 0.48, "learning_rate": 4.6879723466521706e-05, "loss": 0.883, "step": 254 },
    { "epoch": 0.49, "learning_rate": 4.685554007104684e-05, "loss": 0.7365, "step": 255 },
    { "epoch": 0.49, "learning_rate": 4.683126960895141e-05, "loss": 0.8576, "step": 256 },
    { "epoch": 0.49, "learning_rate": 4.680691217692242e-05, "loss": 0.8227, "step": 257 },
    { "epoch": 0.49, "learning_rate": 4.678246787199338e-05, "loss": 0.7949, "step": 258 },
    { "epoch": 0.49, "learning_rate": 4.6757936791543846e-05, "loss": 0.8524, "step": 259 },
    { "epoch": 0.49, "learning_rate": 4.6733319033299075e-05, "loss": 0.7235, "step": 260 },
    { "epoch": 0.5, "learning_rate": 4.670861469532963e-05, "loss": 0.8348, "step": 261 },
    { "epoch": 0.5, "learning_rate": 4.668382387605098e-05, "loss": 0.7539, "step": 262 },
    { "epoch": 0.5, "learning_rate": 4.665894667422311e-05, "loss": 0.7968, "step": 263 },
    { "epoch": 0.5, "learning_rate": 4.663398318895013e-05, "loss": 1.0207, "step": 264 },
    { "epoch": 0.5, "learning_rate": 4.660893351967988e-05, "loss": 0.7758, "step": 265 },
    { "epoch": 0.51, "learning_rate": 4.658379776620354e-05, "loss": 0.8379, "step": 266 },
    { "epoch": 0.51, "learning_rate": 4.6558576028655226e-05, "loss": 0.8968, "step": 267 },
    { "epoch": 0.51, "learning_rate": 4.6533268407511586e-05, "loss": 0.8639, "step": 268 },
    { "epoch": 0.51, "learning_rate": 4.650787500359141e-05, "loss": 0.713, "step": 269 },
    { "epoch": 0.51, "learning_rate": 4.6482395918055197e-05, "loss": 0.9117, "step": 270 },
    { "epoch": 0.52, "learning_rate": 4.6456831252404844e-05, "loss": 0.7259, "step": 271 },
    { "epoch": 0.52, "learning_rate": 4.64311811084831e-05, "loss": 0.7107, "step": 272 },
    { "epoch": 0.52, "learning_rate": 4.64054455884733e-05, "loss": 0.8522, "step": 273 },
    { "epoch": 0.52, "learning_rate": 4.637962479489883e-05, "loss": 0.764, "step": 274 },
    { "epoch": 0.52, "learning_rate": 4.635371883062286e-05, "loss": 0.8385, "step": 275 },
    { "epoch": 0.52, "learning_rate": 4.6327727798847785e-05, "loss": 0.9269, "step": 276 },
    { "epoch": 0.53, "learning_rate": 4.630165180311493e-05, "loss": 0.7428, "step": 277 },
    { "epoch": 0.53, "learning_rate": 4.6275490947304084e-05, "loss": 0.8363, "step": 278 },
    { "epoch": 0.53, "learning_rate": 4.6249245335633086e-05, "loss": 0.8526, "step": 279 },
    { "epoch": 0.53, "learning_rate": 4.6222915072657424e-05, "loss": 0.8759, "step": 280 },
    { "epoch": 0.53, "learning_rate": 4.6196500263269824e-05, "loss": 0.7692, "step": 281 },
    { "epoch": 0.54, "learning_rate": 4.617000101269982e-05, "loss": 0.8109, "step": 282 },
    { "epoch": 0.54, "learning_rate": 4.614341742651332e-05, "loss": 0.6978, "step": 283 },
    { "epoch": 0.54, "learning_rate": 4.6116749610612214e-05, "loss": 0.7617, "step": 284 },
    { "epoch": 0.54, "learning_rate": 4.6089997671233955e-05, "loss": 0.8812, "step": 285 },
    { "epoch": 0.54, "learning_rate": 4.606316171495109e-05, "loss": 0.7013, "step": 286 },
    { "epoch": 0.55, "learning_rate": 4.60362418486709e-05, "loss": 0.8073, "step": 287 },
    { "epoch": 0.55, "learning_rate": 4.6009238179634916e-05, "loss": 0.8615, "step": 288 },
    { "epoch": 0.55, "learning_rate": 4.598215081541853e-05, "loss": 0.9124, "step": 289 },
    { "epoch": 0.55, "learning_rate": 4.595497986393055e-05, "loss": 0.8016, "step": 290 },
    { "epoch": 0.55, "learning_rate": 4.592772543341276e-05, "loss": 0.8414, "step": 291 },
    { "epoch": 0.56, "learning_rate": 4.590038763243953e-05, "loss": 0.7662, "step": 292 },
    { "epoch": 0.56, "learning_rate": 4.587296656991734e-05, "loss": 0.7667, "step": 293 },
    { "epoch": 0.56, "learning_rate": 4.584546235508436e-05, "loss": 0.8404, "step": 294 },
    { "epoch": 0.56, "learning_rate": 4.581787509751001e-05, "loss": 0.9285, "step": 295 },
    { "epoch": 0.56, "learning_rate": 4.5790204907094554e-05, "loss": 0.863, "step": 296 },
    { "epoch": 0.56, "learning_rate": 4.5762451894068606e-05, "loss": 0.941, "step": 297 },
    { "epoch": 0.57, "learning_rate": 4.573461616899275e-05, "loss": 0.9033, "step": 298 },
    { "epoch": 0.57, "learning_rate": 4.570669784275706e-05, "loss": 0.8648, "step": 299 },
    { "epoch": 0.57, "learning_rate": 4.567869702658067e-05, "loss": 0.8397, "step": 300 },
    { "epoch": 0.57, "eval_loss": 0.7216992974281311, "eval_runtime": 2.8122, "eval_samples_per_second": 3.2, "eval_steps_per_second": 0.711, "step": 300 },
    { "epoch": 0.57, "learning_rate": 4.565061383201135e-05, "loss": 0.7869, "step": 301 },
    { "epoch": 0.57, "learning_rate": 4.562244837092501e-05, "loss": 0.8699, "step": 302 },
    { "epoch": 0.58, "learning_rate": 4.559420075552532e-05, "loss": 0.7533, "step": 303 },
    { "epoch": 0.58, "learning_rate": 4.5565871098343206e-05, "loss": 0.7445, "step": 304 },
    { "epoch": 0.58, "learning_rate": 4.553745951223645e-05, "loss": 0.8536, "step": 305 },
    { "epoch": 0.58, "learning_rate": 4.55089661103892e-05, "loss": 0.8168, "step": 306 },
    { "epoch": 0.58, "learning_rate": 4.548039100631153e-05, "loss": 0.7869, "step": 307 },
    { "epoch": 0.59, "learning_rate": 4.5451734313839026e-05, "loss": 0.7409, "step": 308 },
    { "epoch": 0.59, "learning_rate": 4.5422996147132257e-05, "loss": 0.8334, "step": 309 },
    { "epoch": 0.59, "learning_rate": 4.53941766206764e-05, "loss": 0.7438, "step": 310 },
    { "epoch": 0.59, "learning_rate": 4.536527584928073e-05, "loss": 0.8303, "step": 311 },
    { "epoch": 0.59, "learning_rate": 4.533629394807819e-05, "loss": 0.7954, "step": 312 },
    { "epoch": 0.6, "learning_rate": 4.5307231032524897e-05, "loss": 0.694, "step": 313 },
    { "epoch": 0.6, "learning_rate": 4.5278087218399735e-05, "loss": 0.8333, "step": 314 },
    { "epoch": 0.6, "learning_rate": 4.524886262180387e-05, "loss": 0.8683, "step": 315 },
    { "epoch": 0.6, "learning_rate": 4.5219557359160244e-05, "loss": 0.8963, "step": 316 },
    { "epoch": 0.6, "learning_rate": 4.519017154721321e-05, "loss": 0.8737, "step": 317 },
    { "epoch": 0.6, "learning_rate": 4.516070530302795e-05, "loss": 0.8319, "step": 318 },
    { "epoch": 0.61, "learning_rate": 4.51311587439901e-05, "loss": 0.7937, "step": 319 },
    { "epoch": 0.61, "learning_rate": 4.510153198780524e-05, "loss": 0.7702, "step": 320 },
    { "epoch": 0.61, "learning_rate": 4.507182515249845e-05, "loss": 0.9189, "step": 321 },
    { "epoch": 0.61, "learning_rate": 4.504203835641379e-05, "loss": 0.8915, "step": 322 },
    { "epoch": 0.61, "learning_rate": 4.501217171821389e-05, "loss": 0.8164, "step": 323 },
    { "epoch": 0.62, "learning_rate": 4.4982225356879436e-05, "loss": 0.8632, "step": 324 },
    { "epoch": 0.62, "learning_rate": 4.4952199391708716e-05, "loss": 0.7958, "step": 325 },
    { "epoch": 0.62, "learning_rate": 4.492209394231714e-05, "loss": 0.7706, "step": 326 },
    { "epoch": 0.62, "learning_rate": 4.489190912863675e-05, "loss": 0.8457, "step": 327 },
    { "epoch": 0.62, "learning_rate": 4.486164507091578e-05, "loss": 0.8253, "step": 328 },
    { "epoch": 0.63, "learning_rate": 4.483130188971812e-05, "loss": 0.8703, "step": 329 },
    { "epoch": 0.63, "learning_rate": 4.4800879705922885e-05, "loss": 0.7406, "step": 330 },
    { "epoch": 0.63, "learning_rate": 4.4770378640723904e-05, "loss": 0.8331, "step": 331 },
    { "epoch": 0.63, "learning_rate": 4.4739798815629264e-05, "loss": 0.8479, "step": 332 },
    { "epoch": 0.63, "learning_rate": 4.47091403524608e-05, "loss": 0.8482, "step": 333 },
    { "epoch": 0.64, "learning_rate": 4.4678403373353614e-05, "loss": 0.848, "step": 334 },
    { "epoch": 0.64, "learning_rate": 4.464758800075561e-05, "loss": 0.8397, "step": 335 },
    { "epoch": 0.64, "learning_rate": 4.461669435742698e-05, "loss": 0.7962, "step": 336 },
    { "epoch": 0.64, "learning_rate": 4.458572256643973e-05, "loss": 0.8888, "step": 337 },
    { "epoch": 0.64, "learning_rate": 4.455467275117718e-05, "loss": 0.7731, "step": 338 },
    { "epoch": 0.64, "learning_rate": 4.452354503533348e-05, "loss": 0.9121, "step": 339 },
    { "epoch": 0.65, "learning_rate": 4.449233954291313e-05, "loss": 0.7023, "step": 340 },
    { "epoch": 0.65, "learning_rate": 4.446105639823044e-05, "loss": 0.7888, "step": 341 },
    { "epoch": 0.65, "learning_rate": 4.442969572590909e-05, "loss": 0.7791, "step": 342 },
    { "epoch": 0.65, "learning_rate": 4.439825765088161e-05, "loss": 0.8437, "step": 343 },
    { "epoch": 0.65, "learning_rate": 4.436674229838887e-05, "loss": 1.0018, "step": 344 },
    { "epoch": 0.66, "learning_rate": 4.433514979397959e-05, "loss": 0.8058, "step": 345 },
    { "epoch": 0.66, "learning_rate": 4.4303480263509855e-05, "loss": 0.6082, "step": 346 },
    { "epoch": 0.66, "learning_rate": 4.42717338331426e-05, "loss": 0.8858, "step": 347 },
    { "epoch": 0.66, "learning_rate": 4.4239910629347096e-05, "loss": 0.8144, "step": 348 },
    { "epoch": 0.66, "learning_rate": 4.420801077889848e-05, "loss": 0.8518, "step": 349 },
    { "epoch": 0.67, "learning_rate": 4.41760344088772e-05, "loss": 0.7277, "step": 350 },
    { "epoch": 0.67, "learning_rate": 4.414398164666856e-05, "loss": 0.9246, "step": 351 },
    { "epoch": 0.67, "learning_rate": 4.41118526199622e-05, "loss": 0.8504, "step": 352 },
    { "epoch": 0.67, "learning_rate": 4.407964745675153e-05, "loss": 0.7957, "step": 353 },
    { "epoch": 0.67, "learning_rate": 4.404736628533332e-05, "loss": 0.7425, "step": 354 },
    { "epoch": 0.68, "learning_rate": 4.401500923430712e-05, "loss": 0.9097, "step": 355 },
    { "epoch": 0.68, "learning_rate": 4.398257643257474e-05, "loss": 0.7344, "step": 356 },
    { "epoch": 0.68, "learning_rate": 4.395006800933978e-05, "loss": 0.8781, "step": 357 },
    { "epoch": 0.68, "learning_rate": 4.391748409410712e-05, "loss": 0.8264, "step": 358 },
    { "epoch": 0.68, "learning_rate": 4.388482481668232e-05, "loss": 0.7782, "step": 359 },
    { "epoch": 0.68, "learning_rate": 4.385209030717123e-05, "loss": 0.9151, "step": 360 },
    { "epoch": 0.69, "learning_rate": 4.3819280695979354e-05, "loss": 0.7878, "step": 361 },
    { "epoch": 0.69, "learning_rate": 4.37863961138114e-05, "loss": 0.8142, "step": 362 },
    { "epoch": 0.69, "learning_rate": 4.3753436691670746e-05, "loss": 0.707, "step": 363 },
    { "epoch": 0.69, "learning_rate": 4.372040256085891e-05, "loss": 0.7494, "step": 364 },
    { "epoch": 0.69, "learning_rate": 4.368729385297501e-05, "loss": 0.9402, "step": 365 },
    { "epoch": 0.7, "learning_rate": 4.365411069991528e-05, "loss": 0.8291, "step": 366 },
    { "epoch": 0.7, "learning_rate": 4.3620853233872525e-05, "loss": 0.9221, "step": 367 },
    { "epoch": 0.7, "learning_rate": 4.3587521587335586e-05, "loss": 0.775, "step": 368 },
    { "epoch": 0.7, "learning_rate": 4.355411589308881e-05, "loss": 0.8099, "step": 369 },
    { "epoch": 0.7, "learning_rate": 4.352063628421155e-05, "loss": 0.8428, "step": 370 },
    { "epoch": 0.71, "learning_rate": 4.3487082894077594e-05, "loss": 0.8306, "step": 371 },
    { "epoch": 0.71, "learning_rate": 4.345345585635468e-05, "loss": 0.9234, "step": 372 },
    { "epoch": 0.71, "learning_rate": 4.3419755305003904e-05, "loss": 0.8925, "step": 373 },
    { "epoch": 0.71, "learning_rate": 4.3385981374279264e-05, "loss": 0.8707, "step": 374 },
    { "epoch": 0.71, "learning_rate": 4.3352134198727036e-05, "loss": 0.7951, "step": 375 },
    { "epoch": 0.72, "learning_rate": 4.331821391318532e-05, "loss": 0.7105, "step": 376 },
    { "epoch": 0.72, "learning_rate": 4.328422065278346e-05, "loss": 0.6983, "step": 377 },
    { "epoch": 0.72, "learning_rate": 4.325015455294148e-05, "loss": 0.7357, "step": 378 },
    { "epoch": 0.72, "learning_rate": 4.321601574936962e-05, "loss": 0.8573, "step": 379 },
    { "epoch": 0.72, "learning_rate": 4.3181804378067726e-05, "loss": 0.9227, "step": 380 },
    { "epoch": 0.72, "learning_rate": 4.314752057532475e-05, "loss": 0.791, "step": 381 },
    { "epoch": 0.73, "learning_rate": 4.3113164477718174e-05, "loss": 0.7773, "step": 382 },
    { "epoch": 0.73, "learning_rate": 4.3078736222113496e-05, "loss": 0.9276, "step": 383 },
    { "epoch": 0.73, "learning_rate": 4.304423594566368e-05, "loss": 0.7723, "step": 384 },
    { "epoch": 0.73, "learning_rate": 4.300966378580858e-05, "loss": 0.8327, "step": 385 },
    { "epoch": 0.73, "learning_rate": 4.297501988027442e-05, "loss": 0.7469, "step": 386 },
    { "epoch": 0.74, "learning_rate": 4.294030436707327e-05, "loss": 0.6427, "step": 387 },
    { "epoch": 0.74, "learning_rate": 4.290551738450241e-05, "loss": 0.8161, "step": 388 },
    { "epoch": 0.74, "learning_rate": 4.2870659071143894e-05, "loss": 0.7147, "step": 389 },
    { "epoch": 0.74, "learning_rate": 4.2835729565863884e-05, "loss": 0.8505, "step": 390 },
    { "epoch": 0.74, "learning_rate": 4.2800729007812196e-05, "loss": 0.8531, "step": 391 },
    { "epoch": 0.75, "learning_rate": 4.2765657536421675e-05, "loss": 0.7539, "step": 392 },
    { "epoch": 0.75, "learning_rate": 4.273051529140767e-05, "loss": 0.8358, "step": 393 },
    { "epoch": 0.75, "learning_rate": 4.269530241276749e-05, "loss": 0.7724, "step": 394 },
    { "epoch": 0.75, "learning_rate": 4.2660019040779805e-05, "loss": 0.8011, "step": 395 },
    { "epoch": 0.75, "learning_rate": 4.262466531600413e-05, "loss": 0.7952, "step": 396 },
    { "epoch": 0.76, "learning_rate": 4.258924137928023e-05, "loss": 0.7196, "step": 397 },
    { "epoch": 0.76, "learning_rate": 4.2553747371727606e-05, "loss": 0.7226, "step": 398 },
    { "epoch": 0.76, "learning_rate": 4.251818343474486e-05, "loss": 0.7577, "step": 399 },
    { "epoch": 0.76, "learning_rate": 4.24825497100092e-05, "loss": 0.7186, "step": 400 },
    { "epoch": 0.76, "eval_loss": 0.7108685374259949, "eval_runtime": 2.7837, "eval_samples_per_second": 3.233, "eval_steps_per_second": 0.718, "step": 400 },
    { "epoch": 0.76, "learning_rate": 4.244684633947586e-05, "loss": 0.8418, "step": 401 },
    { "epoch": 0.76, "learning_rate": 4.241107346537748e-05, "loss": 0.8305, "step": 402 },
    { "epoch": 0.77, "learning_rate": 4.237523123022365e-05, "loss": 1.0274, "step": 403 },
    { "epoch": 0.77, "learning_rate": 4.233931977680021e-05, "loss": 0.8431, "step": 404 },
    { "epoch": 0.77, "learning_rate": 4.230333924816881e-05, "loss": 0.8005, "step": 405 },
    { "epoch": 0.77, "learning_rate": 4.22672897876662e-05, "loss": 0.8787, "step": 406 },
    { "epoch": 0.77, "learning_rate": 4.223117153890381e-05, "loss": 0.9091, "step": 407 },
    { "epoch": 0.78, "learning_rate": 4.2194984645767065e-05, "loss": 0.7993, "step": 408 },
    { "epoch": 0.78, "learning_rate": 4.215872925241486e-05, "loss": 0.9422, "step": 409 },
    { "epoch": 0.78, "learning_rate": 4.212240550327898e-05, "loss": 0.7923, "step": 410 },
    { "epoch": 0.78, "learning_rate": 4.2086013543063504e-05, "loss": 0.8263, "step": 411 },
    { "epoch": 0.78, "learning_rate": 4.204955351674428e-05, "loss": 0.7529, "step": 412 },
    { "epoch": 0.79, "learning_rate": 4.201302556956827e-05, "loss": 1.0105, "step": 413 },
    { "epoch": 0.79, "learning_rate": 4.197642984705305e-05, "loss": 0.7613, "step": 414 },
    { "epoch": 0.79, "learning_rate": 4.1939766494986174e-05, "loss": 0.7851, "step": 415 },
    { "epoch": 0.79, "learning_rate": 4.190303565942463e-05, "loss": 0.687, "step": 416 },
    { "epoch": 0.79, "learning_rate": 4.1866237486694225e-05, "loss": 0.7759, "step": 417 },
    { "epoch": 0.8, "learning_rate": 4.182937212338903e-05, "loss": 0.8702, "step": 418 },
    { "epoch": 0.8, "learning_rate": 4.1792439716370786e-05, "loss": 0.8531, "step": 419 },
    { "epoch": 0.8, "learning_rate": 4.175544041276831e-05, "loss": 0.8694, "step": 420 },
    { "epoch": 0.8, "learning_rate": 4.1718374359976926e-05, "loss": 0.7487, "step": 421 },
    { "epoch": 0.8, "learning_rate": 4.1681241705657865e-05, "loss": 0.7525, "step": 422 },
    { "epoch": 0.8, "learning_rate": 4.164404259773768e-05, "loss": 0.8515, "step": 423 },
    { "epoch": 0.81, "learning_rate": 4.160677718440765e-05, "loss": 0.7136, "step": 424 },
    { "epoch": 0.81, "learning_rate": 4.156944561412323e-05, "loss": 0.9546, "step": 425 },
    { "epoch": 0.81, "learning_rate": 4.1532048035603376e-05, "loss": 0.6966, "step": 426 },
    { "epoch": 0.81, "learning_rate": 4.149458459783004e-05, "loss": 0.8917, "step": 427 },
    { "epoch": 0.81, "learning_rate": 4.1457055450047534e-05, "loss": 0.7036, "step": 428 },
    { "epoch": 0.82, "learning_rate": 4.141946074176192e-05, "loss": 0.7952, "step": 429 },
    { "epoch": 0.82, "learning_rate": 4.138180062274045e-05, "loss": 0.7388, "step": 430 },
    { "epoch": 0.82, "learning_rate": 4.134407524301098e-05, "loss": 0.8248, "step": 431 },
    { "epoch": 0.82, "learning_rate": 4.13062847528613e-05, "loss": 0.698, "step": 432 },
    { "epoch": 0.82, "learning_rate": 4.126842930283862e-05, "loss": 0.8154, "step": 433 },
    { "epoch": 0.83, "learning_rate": 4.123050904374891e-05, "loss": 0.8472, "step": 434 },
    { "epoch": 0.83, "learning_rate": 4.1192524126656337e-05, "loss": 0.946, "step": 435 },
    { "epoch": 0.83, "learning_rate": 4.1154474702882626e-05, "loss": 0.8445, "step": 436 },
    { "epoch": 0.83, "learning_rate": 4.111636092400652e-05, "loss": 0.7753, "step": 437 },
    { "epoch": 0.83, "learning_rate": 4.107818294186308e-05, "loss": 0.8108, "step": 438 },
    { "epoch": 0.83, "learning_rate": 4.10399409085432e-05, "loss": 0.7051, "step": 439 },
    { "epoch": 0.84, "learning_rate": 4.100163497639288e-05, "loss": 0.9362, "step": 440 },
    { "epoch": 0.84, "learning_rate": 4.096326529801271e-05, "loss": 0.7638, "step": 441 },
    { "epoch": 0.84, "learning_rate": 4.09248320262572e-05, "loss": 0.7786, "step": 442 },
    { "epoch": 0.84, "learning_rate": 4.088633531423423e-05, "loss": 0.8217, "step": 443 },
    { "epoch": 0.84, "learning_rate": 4.084777531530439e-05, "loss": 0.8158, "step": 444 },
    { "epoch": 0.85, "learning_rate": 4.08091521830804e-05, "loss": 0.7217, "step": 445 },
    { "epoch": 0.85, "learning_rate": 4.077046607142646e-05, "loss": 0.7779, "step": 446 },
    { "epoch": 0.85, "learning_rate": 4.07317171344577e-05, "loss": 0.7776, "step": 447 },
    { "epoch": 0.85, "learning_rate": 4.069290552653951e-05, "loss": 0.8132, "step": 448 },
    { "epoch": 0.85, "learning_rate": 4.065403140228693e-05, "loss": 0.768, "step": 449 },
    { "epoch": 0.86, "learning_rate": 4.0615094916564087e-05, "loss": 0.7917, "step": 450 },
    { "epoch": 0.86, "learning_rate": 4.057609622448349e-05, "loss": 0.7716, "step": 451 },
    { "epoch": 0.86, "learning_rate": 4.053703548140551e-05, "loss": 0.7192, "step": 452 },
    { "epoch": 0.86, "learning_rate": 4.049791284293768e-05, "loss": 0.7797, "step": 453 },
    { "epoch": 0.86, "learning_rate": 4.045872846493412e-05, "loss": 0.8675, "step": 454 },
    { "epoch": 0.87, "learning_rate": 4.0419482503494896e-05, "loss": 0.7104, "step": 455 },
    { "epoch": 0.87, "learning_rate": 4.0380175114965426e-05, "loss": 0.8268, "step": 456 },
    { "epoch": 0.87, "learning_rate": 4.034080645593581e-05, "loss": 0.8496, "step": 457 },
    { "epoch": 0.87, "learning_rate": 4.030137668324024e-05, "loss": 0.7655, "step": 458 },
    { "epoch": 0.87, "learning_rate": 4.026188595395639e-05, "loss": 0.783, "step": 459 },
    { "epoch": 0.87, "learning_rate": 4.0222334425404757e-05, "loss": 0.7517, "step": 460 },
    { "epoch": 0.88, "learning_rate": 4.018272225514802e-05, "loss": 0.7921, "step": 461 },
    { "epoch": 0.88, "learning_rate": 4.014304960099048e-05, "loss": 0.7849, "step": 462 },
    { "epoch": 0.88, "learning_rate": 4.010331662097736e-05, "loss": 0.781, "step": 463 },
    { "epoch": 0.88, "learning_rate": 4.0063523473394215e-05, "loss": 0.8017, "step": 464 },
    { "epoch": 0.88, "learning_rate": 4.00236703167663e-05, "loss": 0.7774, "step": 465 },
    { "epoch": 0.89, "learning_rate": 3.998375730985791e-05, "loss": 0.8126, "step": 466 },
    { "epoch": 0.89, "learning_rate": 3.994378461167178e-05, "loss": 0.7426, "step": 467 },
    { "epoch": 0.89, "learning_rate": 3.9903752381448465e-05, "loss": 0.7115, "step": 468 },
    { "epoch": 0.89, "learning_rate": 3.986366077866562e-05, "loss": 0.8825, "step": 469 },
    { "epoch": 0.89, "learning_rate": 3.9823509963037464e-05, "loss": 0.8392, "step": 470 },
    { "epoch": 0.9, "learning_rate": 3.978330009451411e-05, "loss": 0.9632, "step": 471 },
    { "epoch": 0.9, "learning_rate": 3.97430313332809e-05, "loss": 0.7299, "step": 472 },
    { "epoch": 0.9, "learning_rate": 3.97027038397578e-05, "loss": 0.7646, "step": 473 },
    { "epoch": 0.9, "learning_rate": 3.966231777459875e-05, "loss": 0.7527, "step": 474 },
    { "epoch": 0.9, "learning_rate": 3.962187329869102e-05, "loss": 0.9925, "step": 475 },
    { "epoch": 0.91, "learning_rate": 3.958137057315457e-05, "loss": 0.8299, "step": 476 },
    { "epoch": 0.91, "learning_rate": 3.9540809759341426e-05, "loss": 0.8115, "step": 477 },
    { "epoch": 0.91, "learning_rate": 3.9500191018835e-05, "loss": 0.7775, "step": 478 },
    { "epoch": 0.91, "learning_rate": 3.945951451344948e-05, "loss": 0.7463, "step": 479 },
    { "epoch": 0.91, "learning_rate": 3.941878040522918e-05, "loss": 0.912, "step": 480 },
    { "epoch": 0.91, "learning_rate": 3.937798885644789e-05, "loss": 0.8419, "step": 481 },
    { "epoch": 0.92, "learning_rate": 3.93371400296082e-05, "loss": 0.9328, "step": 482 },
    { "epoch": 0.92, "learning_rate": 3.929623408744092e-05, "loss": 0.7434, "step": 483 },
    { "epoch": 0.92, "learning_rate": 3.925527119290436e-05, "loss": 0.7526, "step": 484 },
    { "epoch": 0.92, "learning_rate": 3.9214251509183733e-05, "loss": 0.7491, "step": 485 },
    { "epoch": 0.92, "learning_rate": 3.917317519969046e-05, "loss": 0.7936, "step": 486 },
    { "epoch": 0.93, "learning_rate": 3.913204242806158e-05, "loss": 0.8184, "step": 487 },
    { "epoch": 0.93, "learning_rate": 3.909085335815904e-05, "loss": 0.7352, "step": 488 },
    { "epoch": 0.93, "learning_rate": 3.904960815406906e-05, "loss": 0.7438, "step": 489 },
    { "epoch": 0.93, "learning_rate": 3.900830698010149e-05, "loss": 0.7998, "step": 490 },
    { "epoch": 0.93, "learning_rate": 3.896695000078917e-05, "loss": 0.7039, "step": 491 },
    { "epoch": 0.94, "learning_rate": 3.892553738088721e-05, "loss": 0.9362, "step": 492 },
    { "epoch": 0.94, "learning_rate": 3.888406928537242e-05, "loss": 0.7281, "step": 493 },
    { "epoch": 0.94, "learning_rate": 3.884254587944258e-05, "loss": 0.7993, "step": 494 },
    { "epoch": 0.94, "learning_rate": 3.880096732851581e-05, "loss": 0.6587, "step": 495 },
    { "epoch": 0.94, "learning_rate": 3.8759333798229955e-05, "loss": 0.8164, "step": 496 },
    { "epoch": 0.95, "learning_rate": 3.8717645454441836e-05, "loss": 0.7832, "step": 497 },
    { "epoch": 0.95, "learning_rate": 3.867590246322665e-05, "loss": 0.8283, "step": 498 },
    { "epoch": 0.95, "learning_rate": 3.863410499087731e-05, "loss": 0.9244, "step": 499 },
    { "epoch": 0.95, "learning_rate": 3.859225320390375e-05, "loss": 0.7304, "step": 500 },
    { "epoch": 0.95, "eval_loss": 0.700370192527771, "eval_runtime": 2.8512, "eval_samples_per_second": 3.157, "eval_steps_per_second": 0.701, "step": 500 },
{ "epoch": 0.95, "learning_rate": 3.8550347269032285e-05, "loss": 0.8409, "step": 501 },
{ "epoch": 0.95, "learning_rate": 3.850838735320494e-05, "loss": 0.7045, "step": 502 },
{ "epoch": 0.96, "learning_rate": 3.84663736235788e-05, "loss": 0.8243, "step": 503 },
{ "epoch": 0.96, "learning_rate": 3.842430624752529e-05, "loss": 0.7521, "step": 504 },
{ "epoch": 0.96, "learning_rate": 3.83821853926296e-05, "loss": 0.7212, "step": 505 },
{ "epoch": 0.96, "learning_rate": 3.834001122668992e-05, "loss": 0.8555, "step": 506 },
{ "epoch": 0.96, "learning_rate": 3.829778391771684e-05, "loss": 0.748, "step": 507 },
{ "epoch": 0.97, "learning_rate": 3.825550363393268e-05, "loss": 0.7956, "step": 508 },
{ "epoch": 0.97, "learning_rate": 3.8213170543770724e-05, "loss": 0.7905, "step": 509 },
{ "epoch": 0.97, "learning_rate": 3.8170784815874705e-05, "loss": 0.7764, "step": 510 },
{ "epoch": 0.97, "learning_rate": 3.812834661909801e-05, "loss": 0.8791, "step": 511 },
{ "epoch": 0.97, "learning_rate": 3.808585612250305e-05, "loss": 0.8355, "step": 512 },
{ "epoch": 0.98, "learning_rate": 3.8043313495360586e-05, "loss": 0.7779, "step": 513 },
{ "epoch": 0.98, "learning_rate": 3.800071890714906e-05, "loss": 0.9408, "step": 514 },
{ "epoch": 0.98, "learning_rate": 3.79580725275539e-05, "loss": 0.92, "step": 515 },
{ "epoch": 0.98, "learning_rate": 3.791537452646687e-05, "loss": 0.857, "step": 516 },
{ "epoch": 0.98, "learning_rate": 3.787262507398538e-05, "loss": 0.8673, "step": 517 },
{ "epoch": 0.99, "learning_rate": 3.7829824340411795e-05, "loss": 0.8399, "step": 518 },
{ "epoch": 0.99, "learning_rate": 3.778697249625279e-05, "loss": 0.7836, "step": 519 },
{ "epoch": 0.99, "learning_rate": 3.774406971221863e-05, "loss": 1.0116, "step": 520 },
{ "epoch": 0.99, "learning_rate": 3.770111615922253e-05, "loss": 0.8791, "step": 521 },
{ "epoch": 0.99, "learning_rate": 3.765811200837994e-05, "loss": 0.7788, "step": 522 },
{ "epoch": 0.99, "learning_rate": 3.7615057431007885e-05, "loss": 0.8853, "step": 523 },
{ "epoch": 1.0, "learning_rate": 3.757195259862427e-05, "loss": 0.7102, "step": 524 },
{ "epoch": 1.0, "learning_rate": 3.7528797682947206e-05, "loss": 0.7532, "step": 525 },
{ "epoch": 1.0, "learning_rate": 3.7485592855894346e-05, "loss": 0.7091, "step": 526 },
{ "epoch": 1.0, "learning_rate": 3.744233828958213e-05, "loss": 0.8155, "step": 527 },
{ "epoch": 1.0, "learning_rate": 3.739903415632518e-05, "loss": 0.8474, "step": 528 },
{ "epoch": 1.01, "learning_rate": 3.735568062863557e-05, "loss": 0.8274, "step": 529 },
{ "epoch": 1.01, "learning_rate": 3.731227787922214e-05, "loss": 0.8146, "step": 530 },
{ "epoch": 1.01, "learning_rate": 3.726882608098984e-05, "loss": 0.7811, "step": 531 },
{ "epoch": 1.01, "learning_rate": 3.722532540703898e-05, "loss": 0.669, "step": 532 },
{ "epoch": 1.01, "learning_rate": 3.718177603066462e-05, "loss": 0.6793, "step": 533 },
{ "epoch": 1.02, "learning_rate": 3.71381781253558e-05, "loss": 0.7047, "step": 534 },
{ "epoch": 1.02, "learning_rate": 3.709453186479491e-05, "loss": 0.6797, "step": 535 },
{ "epoch": 1.02, "learning_rate": 3.705083742285697e-05, "loss": 0.7729, "step": 536 },
{ "epoch": 1.02, "learning_rate": 3.700709497360894e-05, "loss": 0.8578, "step": 537 },
{ "epoch": 1.02, "learning_rate": 3.6963304691309026e-05, "loss": 0.7326, "step": 538 },
{ "epoch": 1.03, "learning_rate": 3.691946675040598e-05, "loss": 0.7918, "step": 539 },
{ "epoch": 1.03, "learning_rate": 3.6875581325538436e-05, "loss": 0.7694, "step": 540 },
{ "epoch": 1.03, "learning_rate": 3.6831648591534166e-05, "loss": 0.8746, "step": 541 },
{ "epoch": 1.03, "learning_rate": 3.678766872340943e-05, "loss": 0.8269, "step": 542 },
{ "epoch": 1.03, "learning_rate": 3.674364189636824e-05, "loss": 0.8922, "step": 543 },
{ "epoch": 1.03, "learning_rate": 3.66995682858017e-05, "loss": 0.7966, "step": 544 },
{ "epoch": 1.04, "learning_rate": 3.665544806728726e-05, "loss": 0.7073, "step": 545 },
{ "epoch": 1.04, "learning_rate": 3.661128141658807e-05, "loss": 0.8515, "step": 546 },
{ "epoch": 1.04, "learning_rate": 3.656706850965225e-05, "loss": 0.8462, "step": 547 },
{ "epoch": 1.04, "learning_rate": 3.652280952261216e-05, "loss": 0.7801, "step": 548 },
{ "epoch": 1.04, "learning_rate": 3.647850463178378e-05, "loss": 0.8474, "step": 549 },
{ "epoch": 1.05, "learning_rate": 3.643415401366591e-05, "loss": 0.7059, "step": 550 },
{ "epoch": 1.05, "learning_rate": 3.638975784493957e-05, "loss": 0.7343, "step": 551 },
{ "epoch": 1.05, "learning_rate": 3.634531630246718e-05, "loss": 0.7755, "step": 552 },
{ "epoch": 1.05, "learning_rate": 3.630082956329197e-05, "loss": 0.7483, "step": 553 },
{ "epoch": 1.05, "learning_rate": 3.6256297804637185e-05, "loss": 0.7652, "step": 554 },
{ "epoch": 1.06, "learning_rate": 3.621172120390544e-05, "loss": 0.7683, "step": 555 },
{ "epoch": 1.06, "learning_rate": 3.616709993867796e-05, "loss": 0.7455, "step": 556 },
{ "epoch": 1.06, "learning_rate": 3.612243418671395e-05, "loss": 0.6761, "step": 557 },
{ "epoch": 1.06, "learning_rate": 3.607772412594978e-05, "loss": 0.8251, "step": 558 },
{ "epoch": 1.06, "learning_rate": 3.6032969934498376e-05, "loss": 0.7908, "step": 559 },
{ "epoch": 1.07, "learning_rate": 3.5988171790648454e-05, "loss": 0.7453, "step": 560 },
{ "epoch": 1.07, "learning_rate": 3.594332987286383e-05, "loss": 0.812, "step": 561 },
{ "epoch": 1.07, "learning_rate": 3.589844435978269e-05, "loss": 0.7476, "step": 562 },
{ "epoch": 1.07, "learning_rate": 3.585351543021691e-05, "loss": 0.8996, "step": 563 },
{ "epoch": 1.07, "learning_rate": 3.58085432631513e-05, "loss": 0.6811, "step": 564 },
{ "epoch": 1.07, "learning_rate": 3.576352803774296e-05, "loss": 0.7718, "step": 565 },
{ "epoch": 1.08, "learning_rate": 3.571846993332048e-05, "loss": 0.8054, "step": 566 },
{ "epoch": 1.08, "learning_rate": 3.567336912938329e-05, "loss": 0.7993, "step": 567 },
{ "epoch": 1.08, "learning_rate": 3.5628225805600914e-05, "loss": 0.7635, "step": 568 },
{ "epoch": 1.08, "learning_rate": 3.558304014181228e-05, "loss": 0.8685, "step": 569 },
{ "epoch": 1.08, "learning_rate": 3.553781231802497e-05, "loss": 0.7903, "step": 570 },
{ "epoch": 1.09, "learning_rate": 3.549254251441451e-05, "loss": 0.7809, "step": 571 },
{ "epoch": 1.09, "learning_rate": 3.5447230911323694e-05, "loss": 0.6816, "step": 572 },
{ "epoch": 1.09, "learning_rate": 3.540187768926182e-05, "loss": 0.7613, "step": 573 },
{ "epoch": 1.09, "learning_rate": 3.5356483028903976e-05, "loss": 0.6891, "step": 574 },
{ "epoch": 1.09, "learning_rate": 3.5311047111090346e-05, "loss": 0.8444, "step": 575 },
{ "epoch": 1.1, "learning_rate": 3.5265570116825455e-05, "loss": 0.7576, "step": 576 },
{ "epoch": 1.1, "learning_rate": 3.5220052227277467e-05, "loss": 0.7738, "step": 577 },
{ "epoch": 1.1, "learning_rate": 3.517449362377748e-05, "loss": 0.8101, "step": 578 },
{ "epoch": 1.1, "learning_rate": 3.512889448781876e-05, "loss": 0.7547, "step": 579 },
{ "epoch": 1.1, "learning_rate": 3.5083255001056074e-05, "loss": 0.8112, "step": 580 },
{ "epoch": 1.11, "learning_rate": 3.503757534530491e-05, "loss": 0.7774, "step": 581 },
{ "epoch": 1.11, "learning_rate": 3.499185570254078e-05, "loss": 0.7974, "step": 582 },
{ "epoch": 1.11, "learning_rate": 3.4946096254898516e-05, "loss": 0.7291, "step": 583 },
{ "epoch": 1.11, "learning_rate": 3.490029718467149e-05, "loss": 0.833, "step": 584 },
{ "epoch": 1.11, "learning_rate": 3.485445867431095e-05, "loss": 0.7575, "step": 585 },
{ "epoch": 1.11, "learning_rate": 3.4808580906425254e-05, "loss": 0.8991, "step": 586 },
{ "epoch": 1.12, "learning_rate": 3.4762664063779135e-05, "loss": 0.7575, "step": 587 },
{ "epoch": 1.12, "learning_rate": 3.4716708329293e-05, "loss": 0.7832, "step": 588 },
{ "epoch": 1.12, "learning_rate": 3.46707138860422e-05, "loss": 0.7383, "step": 589 },
{ "epoch": 1.12, "learning_rate": 3.462468091725628e-05, "loss": 0.879, "step": 590 },
{ "epoch": 1.12, "learning_rate": 3.4578609606318265e-05, "loss": 0.8216, "step": 591 },
{ "epoch": 1.13, "learning_rate": 3.453250013676392e-05, "loss": 0.6724, "step": 592 },
{ "epoch": 1.13, "learning_rate": 3.4486352692281035e-05, "loss": 0.8348, "step": 593 },
{ "epoch": 1.13, "learning_rate": 3.4440167456708665e-05, "loss": 0.9279, "step": 594 },
{ "epoch": 1.13, "learning_rate": 3.439394461403644e-05, "loss": 0.7776, "step": 595 },
{ "epoch": 1.13, "learning_rate": 3.4347684348403766e-05, "loss": 0.6902, "step": 596 },
{ "epoch": 1.14, "learning_rate": 3.430138684409918e-05, "loss": 0.7233, "step": 597 },
{ "epoch": 1.14, "learning_rate": 3.425505228555954e-05, "loss": 0.8372, "step": 598 },
{ "epoch": 1.14, "learning_rate": 3.42086808573693e-05, "loss": 0.7619, "step": 599 },
{ "epoch": 1.14, "learning_rate": 3.416227274425985e-05, "loss": 0.7765, "step": 600 },
{ "epoch": 1.14, "eval_loss": 0.6920838952064514, "eval_runtime": 2.7184, "eval_samples_per_second": 3.311, "eval_steps_per_second": 0.736, "step": 600 },
|
{ "epoch": 1.14, "learning_rate": 3.4115828131108665e-05, "loss": 0.832, "step": 601 },
{ "epoch": 1.15, "learning_rate": 3.406934720293865e-05, "loss": 0.8499, "step": 602 },
{ "epoch": 1.15, "learning_rate": 3.402283014491741e-05, "loss": 0.7741, "step": 603 },
{ "epoch": 1.15, "learning_rate": 3.39762771423564e-05, "loss": 0.7704, "step": 604 },
{ "epoch": 1.15, "learning_rate": 3.392968838071036e-05, "loss": 0.714, "step": 605 },
{ "epoch": 1.15, "learning_rate": 3.388306404557642e-05, "loss": 0.859, "step": 606 },
{ "epoch": 1.15, "learning_rate": 3.383640432269346e-05, "loss": 0.6714, "step": 607 },
{ "epoch": 1.16, "learning_rate": 3.378970939794133e-05, "loss": 0.7763, "step": 608 },
{ "epoch": 1.16, "learning_rate": 3.37429794573401e-05, "loss": 0.7375, "step": 609 },
{ "epoch": 1.16, "learning_rate": 3.3696214687049345e-05, "loss": 0.7542, "step": 610 },
{ "epoch": 1.16, "learning_rate": 3.364941527336739e-05, "loss": 0.7633, "step": 611 },
{ "epoch": 1.16, "learning_rate": 3.3602581402730574e-05, "loss": 0.8715, "step": 612 },
{ "epoch": 1.17, "learning_rate": 3.355571326171249e-05, "loss": 0.925, "step": 613 },
{ "epoch": 1.17, "learning_rate": 3.350881103702327e-05, "loss": 0.8448, "step": 614 },
{ "epoch": 1.17, "learning_rate": 3.346187491550883e-05, "loss": 0.7896, "step": 615 },
{ "epoch": 1.17, "learning_rate": 3.34149050841501e-05, "loss": 0.8109, "step": 616 },
{ "epoch": 1.17, "learning_rate": 3.336790173006232e-05, "loss": 0.7915, "step": 617 },
{ "epoch": 1.18, "learning_rate": 3.3320865040494266e-05, "loss": 0.7159, "step": 618 },
{ "epoch": 1.18, "learning_rate": 3.3273795202827514e-05, "loss": 0.6816, "step": 619 },
{ "epoch": 1.18, "learning_rate": 3.32266924045757e-05, "loss": 0.7911, "step": 620 },
{ "epoch": 1.18, "learning_rate": 3.317955683338376e-05, "loss": 0.6496, "step": 621 },
{ "epoch": 1.18, "learning_rate": 3.3132388677027196e-05, "loss": 0.6928, "step": 622 },
{ "epoch": 1.18, "learning_rate": 3.308518812341131e-05, "loss": 0.741, "step": 623 },
{ "epoch": 1.19, "learning_rate": 3.303795536057047e-05, "loss": 0.8024, "step": 624 },
{ "epoch": 1.19, "learning_rate": 3.299069057666736e-05, "loss": 0.7628, "step": 625 },
{ "epoch": 1.19, "learning_rate": 3.294339395999224e-05, "loss": 0.7485, "step": 626 },
{ "epoch": 1.19, "learning_rate": 3.2896065698962145e-05, "loss": 0.7243, "step": 627 },
{ "epoch": 1.19, "learning_rate": 3.2848705982120204e-05, "loss": 0.9272, "step": 628 },
{ "epoch": 1.2, "learning_rate": 3.2801314998134855e-05, "loss": 0.6828, "step": 629 },
{ "epoch": 1.2, "learning_rate": 3.275389293579908e-05, "loss": 0.8215, "step": 630 },
{ "epoch": 1.2, "learning_rate": 3.27064399840297e-05, "loss": 0.7792, "step": 631 },
{ "epoch": 1.2, "learning_rate": 3.265895633186657e-05, "loss": 0.8034, "step": 632 },
{ "epoch": 1.2, "learning_rate": 3.2611442168471817e-05, "loss": 0.8222, "step": 633 },
{ "epoch": 1.21, "learning_rate": 3.256389768312917e-05, "loss": 0.7199, "step": 634 },
{ "epoch": 1.21, "learning_rate": 3.2516323065243135e-05, "loss": 0.7743, "step": 635 },
{ "epoch": 1.21, "learning_rate": 3.246871850433824e-05, "loss": 0.7197, "step": 636 },
{ "epoch": 1.21, "learning_rate": 3.242108419005832e-05, "loss": 0.8729, "step": 637 },
{ "epoch": 1.21, "learning_rate": 3.237342031216574e-05, "loss": 0.8425, "step": 638 },
{ "epoch": 1.22, "learning_rate": 3.23257270605406e-05, "loss": 0.7835, "step": 639 },
{ "epoch": 1.22, "learning_rate": 3.227800462518007e-05, "loss": 0.7397, "step": 640 },
{ "epoch": 1.22, "learning_rate": 3.223025319619755e-05, "loss": 0.8645, "step": 641 },
{ "epoch": 1.22, "learning_rate": 3.2182472963821954e-05, "loss": 0.8268, "step": 642 },
{ "epoch": 1.22, "learning_rate": 3.213466411839693e-05, "loss": 0.8185, "step": 643 },
{ "epoch": 1.22, "learning_rate": 3.208682685038011e-05, "loss": 0.8279, "step": 644 },
{ "epoch": 1.23, "learning_rate": 3.203896135034238e-05, "loss": 0.75, "step": 645 },
{ "epoch": 1.23, "learning_rate": 3.199106780896707e-05, "loss": 0.781, "step": 646 },
{ "epoch": 1.23, "learning_rate": 3.1943146417049236e-05, "loss": 0.7298, "step": 647 },
{ "epoch": 1.23, "learning_rate": 3.189519736549486e-05, "loss": 0.802, "step": 648 },
{ "epoch": 1.23, "learning_rate": 3.184722084532013e-05, "loss": 0.712, "step": 649 },
{ "epoch": 1.24, "learning_rate": 3.1799217047650664e-05, "loss": 0.7124, "step": 650 },
{ "epoch": 1.24, "learning_rate": 3.175118616372074e-05, "loss": 0.7176, "step": 651 },
{ "epoch": 1.24, "learning_rate": 3.1703128384872535e-05, "loss": 0.833, "step": 652 },
{ "epoch": 1.24, "learning_rate": 3.165504390255538e-05, "loss": 0.7863, "step": 653 },
{ "epoch": 1.24, "learning_rate": 3.1606932908324974e-05, "loss": 0.8052, "step": 654 },
{ "epoch": 1.25, "learning_rate": 3.155879559384265e-05, "loss": 0.7619, "step": 655 },
{ "epoch": 1.25, "learning_rate": 3.151063215087456e-05, "loss": 0.811, "step": 656 },
{ "epoch": 1.25, "learning_rate": 3.146244277129099e-05, "loss": 0.9439, "step": 657 },
{ "epoch": 1.25, "learning_rate": 3.141422764706552e-05, "loss": 0.7671, "step": 658 },
{ "epoch": 1.25, "learning_rate": 3.136598697027429e-05, "loss": 0.772, "step": 659 },
{ "epoch": 1.26, "learning_rate": 3.131772093309524e-05, "loss": 0.9286, "step": 660 },
{ "epoch": 1.26, "learning_rate": 3.1269429727807356e-05, "loss": 0.8236, "step": 661 },
{ "epoch": 1.26, "learning_rate": 3.122111354678985e-05, "loss": 0.7815, "step": 662 },
{ "epoch": 1.26, "learning_rate": 3.117277258252147e-05, "loss": 0.6822, "step": 663 },
{ "epoch": 1.26, "learning_rate": 3.112440702757967e-05, "loss": 0.7596, "step": 664 },
{ "epoch": 1.26, "learning_rate": 3.107601707463987e-05, "loss": 0.6976, "step": 665 },
{ "epoch": 1.27, "learning_rate": 3.102760291647469e-05, "loss": 0.7869, "step": 666 },
{ "epoch": 1.27, "learning_rate": 3.097916474595318e-05, "loss": 0.6983, "step": 667 },
{ "epoch": 1.27, "learning_rate": 3.093070275604004e-05, "loss": 0.6471, "step": 668 },
{ "epoch": 1.27, "learning_rate": 3.088221713979485e-05, "loss": 0.8774, "step": 669 },
{ "epoch": 1.27, "learning_rate": 3.083370809037135e-05, "loss": 0.8367, "step": 670 },
{ "epoch": 1.28, "learning_rate": 3.078517580101659e-05, "loss": 0.8485, "step": 671 },
{ "epoch": 1.28, "learning_rate": 3.0736620465070224e-05, "loss": 0.6802, "step": 672 },
{ "epoch": 1.28, "learning_rate": 3.068804227596371e-05, "loss": 0.7254, "step": 673 },
{ "epoch": 1.28, "learning_rate": 3.063944142721954e-05, "loss": 0.6942, "step": 674 },
{ "epoch": 1.28, "learning_rate": 3.0590818112450496e-05, "loss": 0.7767, "step": 675 },
{ "epoch": 1.29, "learning_rate": 3.0542172525358833e-05, "loss": 0.6698, "step": 676 },
{ "epoch": 1.29, "learning_rate": 3.049350485973557e-05, "loss": 0.822, "step": 677 },
{ "epoch": 1.29, "learning_rate": 3.044481530945964e-05, "loss": 0.7565, "step": 678 },
{ "epoch": 1.29, "learning_rate": 3.0396104068497172e-05, "loss": 0.7559, "step": 679 },
{ "epoch": 1.29, "learning_rate": 3.0347371330900727e-05, "loss": 0.6684, "step": 680 },
{ "epoch": 1.3, "learning_rate": 3.0298617290808472e-05, "loss": 0.767, "step": 681 },
{ "epoch": 1.3, "learning_rate": 3.024984214244345e-05, "loss": 0.8836, "step": 682 },
{ "epoch": 1.3, "learning_rate": 3.02010460801128e-05, "loss": 0.8231, "step": 683 },
{ "epoch": 1.3, "learning_rate": 3.0152229298206973e-05, "loss": 0.7628, "step": 684 },
{ "epoch": 1.3, "learning_rate": 3.0103391991198954e-05, "loss": 0.7916, "step": 685 },
{ "epoch": 1.3, "learning_rate": 3.0054534353643494e-05, "loss": 0.8659, "step": 686 },
{ "epoch": 1.31, "learning_rate": 3.0005656580176355e-05, "loss": 0.8002, "step": 687 },
{ "epoch": 1.31, "learning_rate": 2.995675886551349e-05, "loss": 0.7897, "step": 688 },
{ "epoch": 1.31, "learning_rate": 2.9907841404450315e-05, "loss": 0.658, "step": 689 },
{ "epoch": 1.31, "learning_rate": 2.985890439186088e-05, "loss": 0.8164, "step": 690 },
{ "epoch": 1.31, "learning_rate": 2.9809948022697158e-05, "loss": 0.6619, "step": 691 },
{ "epoch": 1.32, "learning_rate": 2.9760972491988213e-05, "loss": 0.7705, "step": 692 },
{ "epoch": 1.32, "learning_rate": 2.9711977994839448e-05, "loss": 0.8147, "step": 693 },
{ "epoch": 1.32, "learning_rate": 2.966296472643182e-05, "loss": 0.6752, "step": 694 },
{ "epoch": 1.32, "learning_rate": 2.961393288202107e-05, "loss": 0.7082, "step": 695 },
{ "epoch": 1.32, "learning_rate": 2.9564882656936947e-05, "loss": 0.8384, "step": 696 },
{ "epoch": 1.33, "learning_rate": 2.9515814246582402e-05, "loss": 0.8222, "step": 697 },
{ "epoch": 1.33, "learning_rate": 2.9466727846432856e-05, "loss": 0.7317, "step": 698 },
{ "epoch": 1.33, "learning_rate": 2.9417623652035386e-05, "loss": 0.8472, "step": 699 },
{ "epoch": 1.33, "learning_rate": 2.9368501859007948e-05, "loss": 0.8283, "step": 700 },
{ "epoch": 1.33, "eval_loss": 0.688589870929718, "eval_runtime": 2.8028, "eval_samples_per_second": 3.211, "eval_steps_per_second": 0.714, "step": 700 },
|
{ "epoch": 1.33, "learning_rate": 2.931936266303863e-05, "loss": 0.7819, "step": 701 },
{ "epoch": 1.34, "learning_rate": 2.9270206259884815e-05, "loss": 0.6657, "step": 702 },
{ "epoch": 1.34, "learning_rate": 2.922103284537247e-05, "loss": 0.7602, "step": 703 },
{ "epoch": 1.34, "learning_rate": 2.91718426153953e-05, "loss": 0.6639, "step": 704 },
{ "epoch": 1.34, "learning_rate": 2.912263576591402e-05, "loss": 0.7266, "step": 705 },
{ "epoch": 1.34, "learning_rate": 2.907341249295554e-05, "loss": 0.6518, "step": 706 },
{ "epoch": 1.34, "learning_rate": 2.9024172992612214e-05, "loss": 0.8892, "step": 707 },
{ "epoch": 1.35, "learning_rate": 2.897491746104101e-05, "loss": 0.8929, "step": 708 },
{ "epoch": 1.35, "learning_rate": 2.8925646094462794e-05, "loss": 0.9271, "step": 709 },
{ "epoch": 1.35, "learning_rate": 2.8876359089161498e-05, "loss": 0.7644, "step": 710 },
{ "epoch": 1.35, "learning_rate": 2.8827056641483347e-05, "loss": 0.87, "step": 711 },
{ "epoch": 1.35, "learning_rate": 2.8777738947836103e-05, "loss": 0.7929, "step": 712 },
{ "epoch": 1.36, "learning_rate": 2.8728406204688253e-05, "loss": 0.7706, "step": 713 },
{ "epoch": 1.36, "learning_rate": 2.8679058608568232e-05, "loss": 0.6968, "step": 714 },
{ "epoch": 1.36, "learning_rate": 2.8629696356063663e-05, "loss": 0.6761, "step": 715 },
{ "epoch": 1.36, "learning_rate": 2.858031964382053e-05, "loss": 0.7972, "step": 716 },
{ "epoch": 1.36, "learning_rate": 2.853092866854245e-05, "loss": 0.7544, "step": 717 },
{ "epoch": 1.37, "learning_rate": 2.8481523626989842e-05, "loss": 0.7911, "step": 718 },
{ "epoch": 1.37, "learning_rate": 2.8432104715979152e-05, "loss": 0.821, "step": 719 },
{ "epoch": 1.37, "learning_rate": 2.838267213238211e-05, "loss": 0.7393, "step": 720 },
{ "epoch": 1.37, "learning_rate": 2.8333226073124893e-05, "loss": 0.7582, "step": 721 },
{ "epoch": 1.37, "learning_rate": 2.828376673518735e-05, "loss": 0.7258, "step": 722 },
{ "epoch": 1.38, "learning_rate": 2.8234294315602245e-05, "loss": 0.8525, "step": 723 },
{ "epoch": 1.38, "learning_rate": 2.818480901145447e-05, "loss": 0.8147, "step": 724 },
{ "epoch": 1.38, "learning_rate": 2.813531101988021e-05, "loss": 0.7536, "step": 725 },
{ "epoch": 1.38, "learning_rate": 2.808580053806622e-05, "loss": 0.8148, "step": 726 },
{ "epoch": 1.38, "learning_rate": 2.8036277763249007e-05, "loss": 0.7732, "step": 727 },
{ "epoch": 1.38, "learning_rate": 2.798674289271404e-05, "loss": 0.7813, "step": 728 },
{ "epoch": 1.39, "learning_rate": 2.793719612379499e-05, "loss": 0.7391, "step": 729 },
{ "epoch": 1.39, "learning_rate": 2.7887637653872916e-05, "loss": 0.9156, "step": 730 },
{ "epoch": 1.39, "learning_rate": 2.7838067680375495e-05, "loss": 0.7615, "step": 731 },
{ "epoch": 1.39, "learning_rate": 2.7788486400776233e-05, "loss": 0.8723, "step": 732 },
{ "epoch": 1.39, "learning_rate": 2.7738894012593662e-05, "loss": 0.7906, "step": 733 },
{ "epoch": 1.4, "learning_rate": 2.7689290713390598e-05, "loss": 0.817, "step": 734 },
{ "epoch": 1.4, "learning_rate": 2.76396767007733e-05, "loss": 0.8896, "step": 735 },
{ "epoch": 1.4, "learning_rate": 2.7590052172390708e-05, "loss": 0.7621, "step": 736 },
{ "epoch": 1.4, "learning_rate": 2.7540417325933655e-05, "loss": 0.8028, "step": 737 },
{ "epoch": 1.4, "learning_rate": 2.7490772359134093e-05, "loss": 0.7157, "step": 738 },
{ "epoch": 1.41, "learning_rate": 2.744111746976426e-05, "loss": 0.8162, "step": 739 },
{ "epoch": 1.41, "learning_rate": 2.7391452855635958e-05, "loss": 0.7835, "step": 740 },
{ "epoch": 1.41, "learning_rate": 2.7341778714599708e-05, "loss": 0.7614, "step": 741 },
{ "epoch": 1.41, "learning_rate": 2.729209524454399e-05, "loss": 0.8477, "step": 742 },
{ "epoch": 1.41, "learning_rate": 2.724240264339445e-05, "loss": 0.7091, "step": 743 },
{ "epoch": 1.42, "learning_rate": 2.7192701109113105e-05, "loss": 0.7915, "step": 744 },
{ "epoch": 1.42, "learning_rate": 2.7142990839697564e-05, "loss": 0.7741, "step": 745 },
{ "epoch": 1.42, "learning_rate": 2.7093272033180222e-05, "loss": 0.6316, "step": 746 },
{ "epoch": 1.42, "learning_rate": 2.704354488762751e-05, "loss": 0.7003, "step": 747 },
{ "epoch": 1.42, "learning_rate": 2.6993809601139053e-05, "loss": 0.7132, "step": 748 },
{ "epoch": 1.42, "learning_rate": 2.6944066371846917e-05, "loss": 0.7494, "step": 749 },
{ "epoch": 1.43, "learning_rate": 2.689431539791482e-05, "loss": 0.7249, "step": 750 },
{ "epoch": 1.43, "learning_rate": 2.6844556877537304e-05, "loss": 0.708, "step": 751 },
{ "epoch": 1.43, "learning_rate": 2.679479100893901e-05, "loss": 0.8788, "step": 752 },
{ "epoch": 1.43, "learning_rate": 2.6745017990373835e-05, "loss": 0.7451, "step": 753 },
{ "epoch": 1.43, "learning_rate": 2.6695238020124146e-05, "loss": 0.7337, "step": 754 },
{ "epoch": 1.44, "learning_rate": 2.664545129650004e-05, "loss": 0.8061, "step": 755 },
{ "epoch": 1.44, "learning_rate": 2.6595658017838472e-05, "loss": 0.7314, "step": 756 },
{ "epoch": 1.44, "learning_rate": 2.6545858382502547e-05, "loss": 0.7077, "step": 757 },
{ "epoch": 1.44, "learning_rate": 2.6496052588880677e-05, "loss": 0.8084, "step": 758 },
{ "epoch": 1.44, "learning_rate": 2.644624083538581e-05, "loss": 0.8214, "step": 759 },
{ "epoch": 1.45, "learning_rate": 2.6396423320454627e-05, "loss": 0.7688, "step": 760 },
{ "epoch": 1.45, "learning_rate": 2.6346600242546793e-05, "loss": 0.8176, "step": 761 },
{ "epoch": 1.45, "learning_rate": 2.6296771800144093e-05, "loss": 0.8393, "step": 762 },
{ "epoch": 1.45, "learning_rate": 2.6246938191749704e-05, "loss": 0.9029, "step": 763 },
{ "epoch": 1.45, "learning_rate": 2.6197099615887394e-05, "loss": 0.719, "step": 764 },
{ "epoch": 1.46, "learning_rate": 2.614725627110069e-05, "loss": 0.8233, "step": 765 },
{ "epoch": 1.46, "learning_rate": 2.6097408355952142e-05, "loss": 0.7085, "step": 766 },
{ "epoch": 1.46, "learning_rate": 2.6047556069022493e-05, "loss": 0.711, "step": 767 },
{ "epoch": 1.46, "learning_rate": 2.5997699608909913e-05, "loss": 0.7867, "step": 768 },
{ "epoch": 1.46, "learning_rate": 2.5947839174229198e-05, "loss": 0.8321, "step": 769 },
{ "epoch": 1.46, "learning_rate": 2.589797496361096e-05, "loss": 0.7958, "step": 770 },
{ "epoch": 1.47, "learning_rate": 2.584810717570087e-05, "loss": 0.6435, "step": 771 },
{ "epoch": 1.47, "learning_rate": 2.5798236009158837e-05, "loss": 0.8112, "step": 772 },
{ "epoch": 1.47, "learning_rate": 2.574836166265826e-05, "loss": 0.7119, "step": 773 },
{ "epoch": 1.47, "learning_rate": 2.5698484334885153e-05, "loss": 0.7362, "step": 774 },
{ "epoch": 1.47, "learning_rate": 2.5648604224537458e-05, "loss": 0.8092, "step": 775 },
{ "epoch": 1.48, "learning_rate": 2.559872153032417e-05, "loss": 0.6548, "step": 776 },
{ "epoch": 1.48, "learning_rate": 2.554883645096459e-05, "loss": 0.7224, "step": 777 },
{ "epoch": 1.48, "learning_rate": 2.5498949185187533e-05, "loss": 0.6886, "step": 778 },
{ "epoch": 1.48, "learning_rate": 2.5449059931730483e-05, "loss": 0.7935, "step": 779 },
{ "epoch": 1.48, "learning_rate": 2.5399168889338897e-05, "loss": 0.8734, "step": 780 },
{ "epoch": 1.49, "learning_rate": 2.534927625676532e-05, "loss": 0.6876, "step": 781 },
{ "epoch": 1.49, "learning_rate": 2.5299382232768633e-05, "loss": 0.7049, "step": 782 },
{ "epoch": 1.49, "learning_rate": 2.5249487016113288e-05, "loss": 0.7669, "step": 783 },
{ "epoch": 1.49, "learning_rate": 2.5199590805568463e-05, "loss": 0.6549, "step": 784 },
{ "epoch": 1.49, "learning_rate": 2.51496937999073e-05, "loss": 0.8337, "step": 785 },
{ "epoch": 1.5, "learning_rate": 2.5099796197906106e-05, "loss": 0.7316, "step": 786 },
{ "epoch": 1.5, "learning_rate": 2.5049898198343586e-05, "loss": 0.7525, "step": 787 },
{ "epoch": 1.5, "learning_rate": 2.5e-05, "loss": 0.8389, "step": 788 },
{ "epoch": 1.5, "learning_rate": 2.4950101801656416e-05, "loss": 0.7193, "step": 789 },
{ "epoch": 1.5, "learning_rate": 2.490020380209389e-05, "loss": 0.7287, "step": 790 },
{ "epoch": 1.5, "learning_rate": 2.4850306200092714e-05, "loss": 0.6943, "step": 791 },
{ "epoch": 1.51, "learning_rate": 2.480040919443155e-05, "loss": 0.767, "step": 792 },
{ "epoch": 1.51, "learning_rate": 2.4750512983886718e-05, "loss": 0.8149, "step": 793 },
{ "epoch": 1.51, "learning_rate": 2.470061776723137e-05, "loss": 0.7885, "step": 794 },
{ "epoch": 1.51, "learning_rate": 2.4650723743234684e-05, "loss": 0.6995, "step": 795 },
{ "epoch": 1.51, "learning_rate": 2.460083111066111e-05, "loss": 0.8069, "step": 796 },
{ "epoch": 1.52, "learning_rate": 2.4550940068269522e-05, "loss": 0.7725, "step": 797 },
{ "epoch": 1.52, "learning_rate": 2.450105081481248e-05, "loss": 0.8232, "step": 798 },
{ "epoch": 1.52, "learning_rate": 2.4451163549035412e-05, "loss": 0.7017, "step": 799 },
{ "epoch": 1.52, "learning_rate": 2.4401278469675835e-05, "loss": 0.8166, "step": 800 },
{ "epoch": 1.52, "eval_loss": 0.6844746470451355, "eval_runtime": 2.748, "eval_samples_per_second": 3.275, "eval_steps_per_second": 0.728, "step": 800 },
|
{ "epoch": 1.52, "learning_rate": 2.4351395775462548e-05, "loss": 0.84, "step": 801 },
{ "epoch": 1.53, "learning_rate": 2.4301515665114846e-05, "loss": 0.7262, "step": 802 },
{ "epoch": 1.53, "learning_rate": 2.425163833734175e-05, "loss": 0.7708, "step": 803 },
{ "epoch": 1.53, "learning_rate": 2.4201763990841166e-05, "loss": 0.7251, "step": 804 },
{ "epoch": 1.53, "learning_rate": 2.415189282429913e-05, "loss": 0.7109, "step": 805 },
{ "epoch": 1.53, "learning_rate": 2.4102025036389046e-05, "loss": 0.6338, "step": 806 },
{ "epoch": 1.53, "learning_rate": 2.4052160825770805e-05, "loss": 0.7663, "step": 807 },
{ "epoch": 1.54, "learning_rate": 2.4002300391090086e-05, "loss": 0.8332, "step": 808 },
{ "epoch": 1.54, "learning_rate": 2.3952443930977513e-05, "loss": 0.7448, "step": 809 },
{ "epoch": 1.54, "learning_rate": 2.3902591644047867e-05, "loss": 0.7438, "step": 810 },
{ "epoch": 1.54, "learning_rate": 2.385274372889932e-05, "loss": 0.8371, "step": 811 },
{ "epoch": 1.54, "learning_rate": 2.380290038411261e-05, "loss": 0.8334, "step": 812 },
{ "epoch": 1.55, "learning_rate": 2.3753061808250295e-05, "loss": 0.7709, "step": 813 },
{ "epoch": 1.55, "learning_rate": 2.3703228199855916e-05, "loss": 0.7632, "step": 814 },
{ "epoch": 1.55, "learning_rate": 2.3653399757453216e-05, "loss": 0.831, "step": 815 },
{ "epoch": 1.55, "learning_rate": 2.3603576679545375e-05, "loss": 0.748, "step": 816 },
{ "epoch": 1.55, "learning_rate": 2.3553759164614198e-05, "loss": 0.8873, "step": 817 },
{ "epoch": 1.56, "learning_rate": 2.350394741111933e-05, "loss": 0.7561, "step": 818 },
{ "epoch": 1.56, "learning_rate": 2.3454141617497456e-05, "loss": 0.7786, "step": 819 },
{ "epoch": 1.56, "learning_rate": 2.340434198216154e-05, "loss": 0.7073, "step": 820 },
{ "epoch": 1.56, "learning_rate": 2.335454870349997e-05, "loss": 0.8041, "step": 821 },
{ "epoch": 1.56, "learning_rate": 2.330476197987586e-05, "loss": 0.6945, "step": 822 },
{ "epoch": 1.57, "learning_rate": 2.3254982009626168e-05, "loss": 0.73, "step": 823 },
{ "epoch": 1.57, "learning_rate": 2.320520899106099e-05, "loss": 0.7354, "step": 824 },
{ "epoch": 1.57, "learning_rate": 2.3155443122462695e-05, "loss": 0.5653, "step": 825 },
{ "epoch": 1.57, "learning_rate": 2.3105684602085192e-05, "loss": 0.7855, "step": 826 },
{ "epoch": 1.57, "learning_rate": 2.305593362815309e-05, "loss": 0.8154, "step": 827 },
{ "epoch": 1.57, "learning_rate": 2.3006190398860953e-05, "loss": 0.8139, "step": 828 },
{ "epoch": 1.58, "learning_rate": 2.2956455112372496e-05, "loss": 0.7663, "step": 829 },
{ "epoch": 1.58, "learning_rate": 2.2906727966819777e-05, "loss": 0.684, "step": 830 },
{ "epoch": 1.58, "learning_rate": 2.285700916030244e-05, "loss": 0.8083, "step": 831 },
{ "epoch": 1.58, "learning_rate": 2.2807298890886905e-05, "loss": 0.733, "step": 832 },
{ "epoch": 1.58, "learning_rate": 2.2757597356605552e-05, "loss": 0.8283, "step": 833 },
{ "epoch": 1.59, "learning_rate": 2.2707904755456015e-05, "loss": 0.7032, "step": 834 },
{ "epoch": 1.59, "learning_rate": 2.2658221285400294e-05, "loss": 0.6825, "step": 835 },
{ "epoch": 1.59, "learning_rate": 2.2608547144364048e-05, "loss": 0.9023, "step": 836 },
{ "epoch": 1.59, "learning_rate": 2.2558882530235753e-05, "loss": 0.6826, "step": 837 },
{ "epoch": 1.59, "learning_rate": 2.2509227640865923e-05, "loss": 0.7528, "step": 838 },
{ "epoch": 1.6, "learning_rate": 2.2459582674066355e-05, "loss": 0.747, "step": 839 },
{ "epoch": 1.6, "learning_rate": 2.2409947827609298e-05, "loss": 0.8042, "step": 840 },
{ "epoch": 1.6, "learning_rate": 2.2360323299226704e-05, "loss": 0.6112, "step": 841 },
{ "epoch": 1.6, "learning_rate": 2.2310709286609398e-05, "loss": 0.7995, "step": 842 },
{ "epoch": 1.6, "learning_rate": 2.226110598740634e-05, "loss": 0.7816, "step": 843 },
{ "epoch": 1.61, "learning_rate": 2.221151359922378e-05, "loss": 0.7245, "step": 844 },
{ "epoch": 1.61, "learning_rate": 2.216193231962451e-05, "loss": 0.8434, "step": 845 },
{ "epoch": 1.61, "learning_rate": 2.2112362346127093e-05, "loss": 0.6944, "step": 846 },
{ "epoch": 1.61, "learning_rate": 2.2062803876205012e-05, "loss": 0.6996, "step": 847 },
{ "epoch": 1.61, "learning_rate": 2.2013257107285963e-05, "loss": 0.6934, "step": 848 },
{ "epoch": 1.61, "learning_rate": 2.1963722236751002e-05, "loss": 0.8258, "step": 849 },
{ "epoch": 1.62, "learning_rate": 2.1914199461933783e-05, "loss": 0.675, "step": 850 },
{ "epoch": 1.62, "learning_rate": 2.1864688980119795e-05, "loss": 0.7469, "step": 851 },
{ "epoch": 1.62, "learning_rate": 2.1815190988545532e-05, "loss": 0.8251, "step": 852 },
{ "epoch": 1.62, "learning_rate": 2.1765705684397754e-05, "loss": 0.7384, "step": 853 },
{ "epoch": 1.62, "learning_rate": 2.171623326481265e-05, "loss": 0.7057, "step": 854 },
{ "epoch": 1.63, "learning_rate": 2.1666773926875123e-05, "loss": 0.747, "step": 855 },
{ "epoch": 1.63, "learning_rate": 2.1617327867617892e-05, "loss": 0.7404, "step": 856 },
{ "epoch": 1.63, "learning_rate": 2.156789528402085e-05, "loss": 0.7471, "step": 857 },
{ "epoch": 1.63, "learning_rate": 2.1518476373010167e-05, "loss": 0.6522, "step": 858 },
{ "epoch": 1.63, "learning_rate": 2.1469071331457556e-05, "loss": 0.7264, "step": 859 },
{ "epoch": 1.64, "learning_rate": 2.1419680356179478e-05, "loss": 0.8483, "step": 860 },
{ "epoch": 1.64, "learning_rate": 2.1370303643936347e-05, "loss": 0.7971, "step": 861 },
{ "epoch": 1.64, "learning_rate": 2.1320941391431777e-05, "loss": 0.7694, "step": 862 },
{ "epoch": 1.64, "learning_rate": 2.127159379531175e-05, "loss": 0.7246, "step": 863 },
{ "epoch": 1.64, "learning_rate": 2.12222610521639e-05, "loss": 0.7312, "step": 864 },
{ "epoch": 1.65, "learning_rate": 2.1172943358516652e-05, "loss": 0.5753, "step": 865 },
{ "epoch": 1.65, "learning_rate": 2.112364091083851e-05, "loss": 0.7694, "step": 866 },
{ "epoch": 1.65, "learning_rate": 2.1074353905537216e-05, "loss": 0.942, "step": 867 },
{ "epoch": 1.65, "learning_rate": 2.1025082538958994e-05, "loss": 0.6571, "step": 868 },
{ "epoch": 1.65, "learning_rate": 2.0975827007387795e-05, "loss": 0.8069, "step": 869 },
{ "epoch": 1.65, "learning_rate": 2.0926587507044458e-05, "loss": 0.7079, "step": 870 },
{ "epoch": 1.66, "learning_rate": 2.0877364234085984e-05, "loss": 0.7603, "step": 871 },
{ "epoch": 1.66, "learning_rate": 2.082815738460471e-05, "loss": 0.7821, "step": 872 },
{ "epoch": 1.66, "learning_rate": 2.0778967154627537e-05, "loss": 0.7098, "step": 873 },
{ "epoch": 1.66, "learning_rate": 2.0729793740115187e-05, "loss": 0.7475, "step": 874 },
{ "epoch": 1.66, "learning_rate": 2.0680637336961373e-05, "loss": 0.7824, "step": 875 },
{ "epoch": 1.67, "learning_rate": 2.063149814099205e-05, "loss": 0.7515, "step": 876 },
{ "epoch": 1.67, "learning_rate": 2.0582376347964613e-05, "loss": 0.6972, "step": 877 },
{ "epoch": 1.67, "learning_rate": 2.053327215356715e-05, "loss": 0.7595, "step": 878 },
{ "epoch": 1.67, "learning_rate": 2.0484185753417604e-05, "loss": 0.6972, "step": 879 },
{ "epoch": 1.67, "learning_rate": 2.043511734306306e-05, "loss": 0.7549, "step": 880 },
{ "epoch": 1.68, "learning_rate": 2.0386067117978934e-05, "loss": 0.8029, "step": 881 },
{ "epoch": 1.68, "learning_rate": 2.033703527356818e-05, "loss": 0.7691, "step": 882 },
{ "epoch": 1.68, "learning_rate": 2.028802200516056e-05, "loss": 0.6449, "step": 883 },
{ "epoch": 1.68, "learning_rate": 2.0239027508011796e-05, "loss": 0.7241, "step": 884 },
{ "epoch": 1.68, "learning_rate": 2.0190051977302845e-05, "loss": 0.7877, "step": 885 },
{ "epoch": 1.69, "learning_rate": 2.0141095608139125e-05, "loss": 0.8097, "step": 886 },
{ "epoch": 1.69, "learning_rate": 2.009215859554969e-05, "loss": 0.7039, "step": 887 },
{ "epoch": 1.69, "learning_rate": 2.0043241134486508e-05, "loss": 0.843, "step": 888 },
{ "epoch": 1.69, "learning_rate": 1.999434341982365e-05, "loss": 0.7726, "step": 889 },
{ "epoch": 1.69, "learning_rate": 1.9945465646356512e-05, "loss": 0.8044, "step": 890 },
{ "epoch": 1.69, "learning_rate": 1.9896608008801056e-05, "loss": 0.6961, "step": 891 },
{ "epoch": 1.7, "learning_rate": 1.9847770701793032e-05, "loss": 0.7592, "step": 892 },
{ "epoch": 1.7, "learning_rate": 1.97989539198872e-05, "loss": 0.8141, "step": 893 },
{ "epoch": 1.7, "learning_rate": 1.975015785755655e-05, "loss": 0.708, "step": 894 },
{ "epoch": 1.7, "learning_rate": 1.970138270919154e-05, "loss": 0.6981, "step": 895 },
{ "epoch": 1.7, "learning_rate": 1.965262866909928e-05, "loss": 0.761, "step": 896 },
{ "epoch": 1.71, "learning_rate": 1.960389593150283e-05, "loss": 0.7069, "step": 897 },
{ "epoch": 1.71, "learning_rate": 1.9555184690540366e-05, "loss": 0.9216, "step": 898 },
{ "epoch": 1.71, "learning_rate": 1.9506495140264434e-05, "loss": 0.741, "step": 899 },
{ "epoch": 1.71, "learning_rate": 1.945782747464116e-05, "loss": 0.7148, "step": 900 },
{ "epoch": 1.71, "eval_loss": 0.675603449344635, "eval_runtime": 2.7591, "eval_samples_per_second": 3.262, "eval_steps_per_second": 0.725, "step": 900 },
|
{ "epoch": 1.71, "learning_rate": 1.9409181887549513e-05, "loss": 0.85, "step": 901 },
{ "epoch": 1.72, "learning_rate": 1.9360558572780468e-05, "loss": 0.706, "step": 902 },
{ "epoch": 1.72, "learning_rate": 1.93119577240363e-05, "loss": 0.7436, "step": 903 },
{ "epoch": 1.72, "learning_rate": 1.9263379534929782e-05, "loss": 0.667, "step": 904 },
{ "epoch": 1.72, "learning_rate": 1.9214824198983408e-05, "loss": 0.8387, "step": 905 },
{ "epoch": 1.72, "learning_rate": 1.9166291909628653e-05, "loss": 0.8664, "step": 906 },
{ "epoch": 1.73, "learning_rate": 1.9117782860205152e-05, "loss": 0.7756, "step": 907 },
{ "epoch": 1.73, "learning_rate": 1.9069297243959965e-05, "loss": 0.7163, "step": 908 },
{ "epoch": 1.73, "learning_rate": 1.9020835254046823e-05, "loss": 0.7515, "step": 909 },
{ "epoch": 1.73, "learning_rate": 1.8972397083525306e-05, "loss": 0.784, "step": 910 },
{ "epoch": 1.73, "learning_rate": 1.892398292536013e-05, "loss": 0.6658, "step": 911 },
{ "epoch": 1.73, "learning_rate": 1.887559297242034e-05, "loss": 0.7117, "step": 912 },
{ "epoch": 1.74, "learning_rate": 1.8827227417478534e-05, "loss": 0.8233, "step": 913 },
{ "epoch": 1.74, "learning_rate": 1.8778886453210155e-05, "loss": 0.8324, "step": 914 },
{ "epoch": 1.74, "learning_rate": 1.8730570272192653e-05, "loss": 0.8274, "step": 915 },
{ "epoch": 1.74, "learning_rate": 1.8682279066904762e-05, "loss": 0.7631, "step": 916 },
{ "epoch": 1.74, "learning_rate": 1.863401302972571e-05, "loss": 0.8127, "step": 917 },
{ "epoch": 1.75, "learning_rate": 1.8585772352934493e-05, "loss": 0.6535, "step": 918 },
{ "epoch": 1.75, "learning_rate": 1.8537557228709012e-05, "loss": 0.6603, "step": 919 },
{ "epoch": 1.75, "learning_rate": 1.8489367849125443e-05, "loss": 0.7281, "step": 920 },
{ "epoch": 1.75, "learning_rate": 1.8441204406157357e-05, "loss": 0.6857, "step": 921 },
{ "epoch": 1.75, "learning_rate": 1.8393067091675028e-05, "loss": 0.6996, "step": 922 },
{ "epoch": 1.76, "learning_rate": 1.8344956097444633e-05, "loss": 0.8077, "step": 923 },
{ "epoch": 1.76, "learning_rate": 1.8296871615127474e-05, "loss": 0.7591, "step": 924 },
{ "epoch": 1.76, "learning_rate": 1.824881383627927e-05, "loss": 0.7829, "step": 925 },
{ "epoch": 1.76, "learning_rate": 1.8200782952349342e-05, "loss": 0.707, "step": 926 },
{ "epoch": 1.76, "learning_rate": 1.8152779154679877e-05, "loss": 0.7687, "step": 927 },
{ "epoch": 1.77, "learning_rate": 1.8104802634505146e-05, "loss": 0.7371, "step": 928 },
{ "epoch": 1.77, "learning_rate": 1.805685358295078e-05, "loss": 0.9469, "step": 929 },
{ "epoch": 1.77, "learning_rate": 1.8008932191032937e-05, "loss": 0.8391, "step": 930 },
{ "epoch": 1.77, "learning_rate": 1.7961038649657625e-05, "loss": 0.7258, "step": 931 },
{ "epoch": 1.77, "learning_rate": 1.7913173149619896e-05, "loss": 0.7722, "step": 932 },
{ "epoch": 1.77, "learning_rate": 1.7865335881603078e-05, "loss": 0.7824, "step": 933 },
{ "epoch": 1.78, "learning_rate": 1.7817527036178052e-05, "loss": 0.7579, "step": 934 },
{ "epoch": 1.78, "learning_rate": 1.776974680380246e-05, "loss": 0.78, "step": 935 },
{ "epoch": 1.78, "learning_rate": 1.7721995374819933e-05, "loss": 0.7067, "step": 936 },
{ "epoch": 1.78, "learning_rate": 1.767427293945941e-05, "loss": 0.7355, "step": 937 },
{ "epoch": 1.78, "learning_rate": 1.7626579687834268e-05, "loss": 0.7999, "step": 938 },
{ "epoch": 1.79, "learning_rate": 1.757891580994168e-05, "loss": 0.7543, "step": 939 },
{ "epoch": 1.79, "learning_rate": 1.7531281495661757e-05, "loss": 0.8261, "step": 940 },
{ "epoch": 1.79, "learning_rate": 1.748367693475687e-05, "loss": 0.682, "step": 941 },
{ "epoch": 1.79, "learning_rate": 1.7436102316870837e-05, "loss": 0.7408, "step": 942 },
{ "epoch": 1.79, "learning_rate": 1.738855783152819e-05, "loss": 0.6882, "step": 943 },
{ "epoch": 1.8, "learning_rate": 1.7341043668133446e-05, "loss": 0.8076, "step": 944 },
{ "epoch": 1.8, "learning_rate": 1.7293560015970295e-05, "loss": 0.8133, "step": 945 },
{ "epoch": 1.8, "learning_rate": 1.724610706420092e-05, "loss": 0.824, "step": 946 },
{ "epoch": 1.8, "learning_rate": 1.7198685001865157e-05, "loss": 0.7343, "step": 947 },
{ "epoch": 1.8, "learning_rate": 1.7151294017879805e-05, "loss": 0.6586, "step": 948 },
{ "epoch": 1.81, "learning_rate": 1.7103934301037865e-05, "loss": 0.6814, "step": 949 },
{ "epoch": 1.81, "learning_rate": 1.7056606040007767e-05, "loss": 0.725, "step": 950 },
{ "epoch": 1.81, "learning_rate": 1.7009309423332638e-05, "loss": 0.7711, "step": 951 },
{ "epoch": 1.81, "learning_rate": 1.6962044639429537e-05, "loss": 0.7651, "step": 952 },
{ "epoch": 1.81, "learning_rate": 1.69148118765887e-05, "loss": 0.7271, "step": 953 },
{ "epoch": 1.81, "learning_rate": 1.686761132297281e-05, "loss": 0.7408, "step": 954 },
{ "epoch": 1.82, "learning_rate": 1.6820443166616246e-05, "loss": 0.7473, "step": 955 },
{ "epoch": 1.82, "learning_rate": 1.6773307595424304e-05, "loss": 0.8045, "step": 956 },
{ "epoch": 1.82, "learning_rate": 1.6726204797172492e-05, "loss": 0.8498, "step": 957 },
{ "epoch": 1.82, "learning_rate": 1.667913495950575e-05, "loss": 0.8384, "step": 958 },
{ "epoch": 1.82, "learning_rate": 1.663209826993769e-05, "loss": 0.9495, "step": 959 },
{ "epoch": 1.83, "learning_rate": 1.6585094915849907e-05, "loss": 0.7616, "step": 960 },
{ "epoch": 1.83, "learning_rate": 1.6538125084491172e-05, "loss": 0.9388, "step": 961 },
{ "epoch": 1.83, "learning_rate": 1.6491188962976727e-05, "loss": 0.7574, "step": 962 },
{ "epoch": 1.83, "learning_rate": 1.644428673828751e-05, "loss": 0.8715, "step": 963 },
{ "epoch": 1.83, "learning_rate": 1.6397418597269436e-05, "loss": 0.7779, "step": 964 },
{ "epoch": 1.84, "learning_rate": 1.6350584726632616e-05, "loss": 0.8231, "step": 965 },
{ "epoch": 1.84, "learning_rate": 1.6303785312950658e-05, "loss": 0.6981, "step": 966 },
{ "epoch": 1.84, "learning_rate": 1.6257020542659906e-05, "loss": 0.8721, "step": 967 },
{ "epoch": 1.84, "learning_rate": 1.621029060205867e-05, "loss": 0.7892, "step": 968 },
{ "epoch": 1.84, "learning_rate": 1.6163595677306544e-05, "loss": 0.7994, "step": 969 },
{ "epoch": 1.84, "learning_rate": 1.611693595442359e-05, "loss": 0.7794, "step": 970 },
{ "epoch": 1.85, "learning_rate": 1.607031161928965e-05, "loss": 0.7409, "step": 971 },
{ "epoch": 1.85, "learning_rate": 1.6023722857643608e-05, "loss": 0.7802, "step": 972 },
{ "epoch": 1.85, "learning_rate": 1.59771698550826e-05, "loss": 0.8131, "step": 973 },
{ "epoch": 1.85, "learning_rate": 1.5930652797061346e-05, "loss": 0.8236, "step": 974 },
{ "epoch": 1.85, "learning_rate": 1.5884171868891344e-05, "loss": 0.8097, "step": 975 },
{ "epoch": 1.86, "learning_rate": 1.5837727255740155e-05, "loss": 0.8256, "step": 976 },
{ "epoch": 1.86, "learning_rate": 1.57913191426307e-05, "loss": 0.7781, "step": 977 },
{ "epoch": 1.86, "learning_rate": 1.5744947714440467e-05, "loss": 0.7512, "step": 978 },
{ "epoch": 1.86, "learning_rate": 1.569861315590082e-05, "loss": 0.7313, "step": 979 },
{ "epoch": 1.86, "learning_rate": 1.565231565159623e-05, "loss": 0.7801, "step": 980 },
{ "epoch": 1.87, "learning_rate": 1.560605538596357e-05, "loss": 0.8773, "step": 981 },
{ "epoch": 1.87, "learning_rate": 1.5559832543291337e-05, "loss": 0.8349, "step": 982 },
{ "epoch": 1.87, "learning_rate": 1.5513647307718967e-05, "loss": 0.8868, "step": 983 },
{ "epoch": 1.87, "learning_rate": 1.5467499863236086e-05, "loss": 0.6488, "step": 984 },
{ "epoch": 1.87, "learning_rate": 1.5421390393681737e-05, "loss": 0.7423, "step": 985 },
{ "epoch": 1.88, "learning_rate": 1.537531908274372e-05, "loss": 0.8946, "step": 986 },
{ "epoch": 1.88, "learning_rate": 1.5329286113957808e-05, "loss": 0.7254, "step": 987 },
{ "epoch": 1.88, "learning_rate": 1.5283291670707012e-05, "loss": 0.7644, "step": 988 },
{ "epoch": 1.88, "learning_rate": 1.5237335936220873e-05, "loss": 0.5964, "step": 989 },
{ "epoch": 1.88, "learning_rate": 1.5191419093574754e-05, "loss": 0.7413, "step": 990 },
{ "epoch": 1.88, "learning_rate": 1.5145541325689047e-05, "loss": 0.8225, "step": 991 },
{ "epoch": 1.89, "learning_rate": 1.5099702815328515e-05, "loss": 0.8126, "step": 992 },
{ "epoch": 1.89, "learning_rate": 1.5053903745101497e-05, "loss": 0.8222, "step": 993 },
{ "epoch": 1.89, "learning_rate": 1.5008144297459226e-05, "loss": 0.7145, "step": 994 },
{ "epoch": 1.89, "learning_rate": 1.49624246546951e-05, "loss": 0.9004, "step": 995 },
{ "epoch": 1.89, "learning_rate": 1.4916744998943932e-05, "loss": 0.7225, "step": 996 },
{ "epoch": 1.9, "learning_rate": 1.4871105512181244e-05, "loss": 0.7156, "step": 997 },
{ "epoch": 1.9, "learning_rate": 1.4825506376222533e-05, "loss": 0.7576, "step": 998 },
{ "epoch": 1.9, "learning_rate": 1.4779947772722541e-05, "loss": 0.7516, "step": 999 },
{ "epoch": 1.9, "learning_rate": 1.4734429883174556e-05, "loss": 0.6625, "step": 1000 },
{ "epoch": 1.9, "eval_loss": 0.6701757907867432, "eval_runtime": 2.7584, "eval_samples_per_second": 3.263, "eval_steps_per_second": 0.725, "step": 1000 },
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 1.4688952888909658e-05, |
|
"loss": 0.9485, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.4643516971096022e-05, |
|
"loss": 0.7172, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.4598122310738177e-05, |
|
"loss": 0.8178, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.455276908867631e-05, |
|
"loss": 0.7291, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.4507457485585502e-05, |
|
"loss": 0.7454, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 1.4462187681975048e-05, |
|
"loss": 0.7785, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4416959858187723e-05, |
|
"loss": 0.7644, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4371774194399085e-05, |
|
"loss": 0.6941, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4326630870616714e-05, |
|
"loss": 0.7953, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4281530066679522e-05, |
|
"loss": 0.7388, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4236471962257045e-05, |
|
"loss": 0.7571, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 1.4191456736848702e-05, |
|
"loss": 0.7233, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.4146484569783103e-05, |
|
"loss": 0.7986, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.410155564021731e-05, |
|
"loss": 0.763, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.4056670127136181e-05, |
|
"loss": 0.8311, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.4011828209351552e-05, |
|
"loss": 0.7129, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 1.3967030065501634e-05, |
|
"loss": 0.8807, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.3922275874050222e-05, |
|
"loss": 0.7498, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.3877565813286059e-05, |
|
"loss": 0.7875, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.383290006132204e-05, |
|
"loss": 0.776, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.3788278796094572e-05, |
|
"loss": 0.7289, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 1.3743702195362824e-05, |
|
"loss": 0.755, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3699170436708042e-05, |
|
"loss": 0.6786, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3654683697532828e-05, |
|
"loss": 0.744, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3610242155060435e-05, |
|
"loss": 0.722, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3565845986334088e-05, |
|
"loss": 0.8039, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 1.3521495368216236e-05, |
|
"loss": 0.7784, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3477190477387843e-05, |
|
"loss": 0.7962, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.343293149034776e-05, |
|
"loss": 0.7224, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3388718583411932e-05, |
|
"loss": 0.6494, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3344551932712746e-05, |
|
"loss": 0.8496, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3300431714198303e-05, |
|
"loss": 0.7302, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 1.3256358103631766e-05, |
|
"loss": 0.8067, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3212331276590584e-05, |
|
"loss": 0.7495, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3168351408465835e-05, |
|
"loss": 0.6902, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.312441867446157e-05, |
|
"loss": 0.6667, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3080533249594024e-05, |
|
"loss": 0.6851, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 1.3036695308690983e-05, |
|
"loss": 0.7095, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.2992905026391064e-05, |
|
"loss": 0.806, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.2949162577143031e-05, |
|
"loss": 0.7883, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.2905468135205096e-05, |
|
"loss": 0.707, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.28618218746442e-05, |
|
"loss": 0.7659, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 1.2818223969335386e-05, |
|
"loss": 0.8123, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.2774674592961033e-05, |
|
"loss": 0.7448, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.273117391901017e-05, |
|
"loss": 0.6221, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.2687722120777861e-05, |
|
"loss": 0.6408, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.2644319371364439e-05, |
|
"loss": 0.7091, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 1.2600965843674825e-05, |
|
"loss": 0.7801, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.2557661710417868e-05, |
|
"loss": 0.6927, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.251440714410566e-05, |
|
"loss": 0.8319, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.2471202317052796e-05, |
|
"loss": 0.6475, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.2428047401375743e-05, |
|
"loss": 0.6783, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.238494256899212e-05, |
|
"loss": 0.7294, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 1.2341887991620063e-05, |
|
"loss": 0.7516, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.2298883840777472e-05, |
|
"loss": 0.7799, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.2255930287781373e-05, |
|
"loss": 0.7467, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.2213027503747214e-05, |
|
"loss": 0.7212, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.217017565958821e-05, |
|
"loss": 0.7798, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 1.2127374926014628e-05, |
|
"loss": 0.8727, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.2084625473533128e-05, |
|
"loss": 0.76, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.2041927472446112e-05, |
|
"loss": 0.6707, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.1999281092850956e-05, |
|
"loss": 0.7354, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.195668650463942e-05, |
|
"loss": 0.6592, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 1.1914143877496956e-05, |
|
"loss": 0.6748, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.1871653380901992e-05, |
|
"loss": 0.7085, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.1829215184125298e-05, |
|
"loss": 0.7195, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.1786829456229282e-05, |
|
"loss": 0.8378, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.1744496366067336e-05, |
|
"loss": 0.7401, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 1.1702216082283162e-05, |
|
"loss": 0.7453, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.165998877331008e-05, |
|
"loss": 0.7991, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.1617814607370403e-05, |
|
"loss": 0.7911, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.157569375247471e-05, |
|
"loss": 0.665, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.1533626376421205e-05, |
|
"loss": 0.873, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.1491612646795058e-05, |
|
"loss": 0.7748, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 1.1449652730967719e-05, |
|
"loss": 0.7746, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.1407746796096255e-05, |
|
"loss": 0.6605, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.1365895009122688e-05, |
|
"loss": 0.6991, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.1324097536773348e-05, |
|
"loss": 0.7149, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.1282354545558176e-05, |
|
"loss": 0.7596, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 1.1240666201770047e-05, |
|
"loss": 0.7748, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.1199032671484186e-05, |
|
"loss": 0.775, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.1157454120557429e-05, |
|
"loss": 0.7211, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.1115930714627584e-05, |
|
"loss": 0.7309, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.1074462619112791e-05, |
|
"loss": 0.6865, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 1.1033049999210834e-05, |
|
"loss": 0.7214, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.0991693019898511e-05, |
|
"loss": 0.7503, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.0950391845930949e-05, |
|
"loss": 0.7297, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.0909146641840965e-05, |
|
"loss": 0.7845, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.0867957571938422e-05, |
|
"loss": 0.7873, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 1.0826824800309555e-05, |
|
"loss": 0.8543, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.0785748490816277e-05, |
|
"loss": 0.7349, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.0744728807095648e-05, |
|
"loss": 0.7081, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.0703765912559086e-05, |
|
"loss": 0.7484, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.06628599703918e-05, |
|
"loss": 0.6553, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.0622011143552107e-05, |
|
"loss": 0.7759, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 1.0581219594770821e-05, |
|
"loss": 0.7877, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.0540485486550528e-05, |
|
"loss": 0.6765, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.0499808981165005e-05, |
|
"loss": 0.7901, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.0459190240658582e-05, |
|
"loss": 0.6882, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.0418629426845433e-05, |
|
"loss": 0.7302, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"eval_loss": 0.6691840887069702, |
|
"eval_runtime": 2.7632, |
|
"eval_samples_per_second": 3.257, |
|
"eval_steps_per_second": 0.724, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1.0378126701308988e-05, |
|
"loss": 0.8246, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.0337682225401254e-05, |
|
"loss": 0.644, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.0297296160242206e-05, |
|
"loss": 0.6603, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.0256968666719105e-05, |
|
"loss": 0.6493, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.0216699905485889e-05, |
|
"loss": 0.6212, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 1.0176490036962533e-05, |
|
"loss": 0.6607, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.0136339221334393e-05, |
|
"loss": 0.6983, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.0096247618551541e-05, |
|
"loss": 0.7993, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.0056215388328214e-05, |
|
"loss": 0.765, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 1.0016242690142094e-05, |
|
"loss": 0.7203, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 9.97632968323371e-06, |
|
"loss": 0.7775, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.936476526605784e-06, |
|
"loss": 0.8338, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.896683379022651e-06, |
|
"loss": 0.8009, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.856950399009532e-06, |
|
"loss": 0.6568, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.81727774485198e-06, |
|
"loss": 0.8037, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.777665574595246e-06, |
|
"loss": 0.9051, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 9.738114046043606e-06, |
|
"loss": 0.6952, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.698623316759758e-06, |
|
"loss": 0.6797, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.659193544064198e-06, |
|
"loss": 0.6995, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.619824885034582e-06, |
|
"loss": 0.769, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.580517496505107e-06, |
|
"loss": 0.8671, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 9.541271535065888e-06, |
|
"loss": 0.6591, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.50208715706232e-06, |
|
"loss": 0.7139, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.46296451859449e-06, |
|
"loss": 0.6103, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.423903775516518e-06, |
|
"loss": 0.8666, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.38490508343592e-06, |
|
"loss": 0.707, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 9.34596859771307e-06, |
|
"loss": 0.7321, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.307094473460499e-06, |
|
"loss": 0.8915, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.268282865542303e-06, |
|
"loss": 0.8282, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.229533928573545e-06, |
|
"loss": 0.8228, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.190847816919612e-06, |
|
"loss": 0.6513, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 9.152224684695614e-06, |
|
"loss": 0.7928, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.113664685765769e-06, |
|
"loss": 0.7345, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.075167973742802e-06, |
|
"loss": 0.7776, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 9.036734701987295e-06, |
|
"loss": 0.7742, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.99836502360712e-06, |
|
"loss": 0.7557, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.960059091456802e-06, |
|
"loss": 0.7659, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.921817058136916e-06, |
|
"loss": 0.7884, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.883639075993488e-06, |
|
"loss": 0.7221, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.845525297117369e-06, |
|
"loss": 0.7502, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.807475873343666e-06, |
|
"loss": 0.6577, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.7694909562511e-06, |
|
"loss": 0.7325, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 8.731570697161381e-06, |
|
"loss": 0.6415, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.6937152471387e-06, |
|
"loss": 0.6961, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.655924756989025e-06, |
|
"loss": 0.6455, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.618199377259553e-06, |
|
"loss": 0.7383, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.580539258238084e-06, |
|
"loss": 0.7046, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 8.542944549952481e-06, |
|
"loss": 0.6592, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.505415402169967e-06, |
|
"loss": 0.7279, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.467951964396632e-06, |
|
"loss": 0.6761, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.430554385876772e-06, |
|
"loss": 0.7188, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.393222815592344e-06, |
|
"loss": 0.7333, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.355957402262332e-06, |
|
"loss": 0.7773, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.318758294342139e-06, |
|
"loss": 0.649, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.28162564002308e-06, |
|
"loss": 0.7533, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.244559587231695e-06, |
|
"loss": 0.7502, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.207560283629223e-06, |
|
"loss": 0.7971, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.17062787661097e-06, |
|
"loss": 0.7257, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 8.133762513305784e-06, |
|
"loss": 0.6843, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.09696434057538e-06, |
|
"loss": 0.7508, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.060233505013826e-06, |
|
"loss": 0.7229, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.023570152946952e-06, |
|
"loss": 0.7352, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 7.986974430431732e-06, |
|
"loss": 0.7093, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 7.950446483255727e-06, |
|
"loss": 0.81, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.913986456936495e-06, |
|
"loss": 0.7186, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.877594496721028e-06, |
|
"loss": 0.6911, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.841270747585144e-06, |
|
"loss": 0.7206, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.805015354232934e-06, |
|
"loss": 0.729, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 7.768828461096189e-06, |
|
"loss": 0.763, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.7327102123338e-06, |
|
"loss": 0.755, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.6966607518312e-06, |
|
"loss": 0.6827, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.660680223199786e-06, |
|
"loss": 0.6989, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.624768769776356e-06, |
|
"loss": 0.6037, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.588926534622523e-06, |
|
"loss": 0.7307, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.553153660524148e-06, |
|
"loss": 0.7278, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.51745028999081e-06, |
|
"loss": 0.8089, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.481816565255153e-06, |
|
"loss": 0.8623, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.446252628272401e-06, |
|
"loss": 0.6424, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.4107586207197684e-06, |
|
"loss": 0.7238, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 7.3753346839958745e-06, |
|
"loss": 0.7494, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.3399809592202e-06, |
|
"loss": 0.6828, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.304697587232518e-06, |
|
"loss": 0.8893, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.269484708592333e-06, |
|
"loss": 0.7394, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.234342463578334e-06, |
|
"loss": 0.7413, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 7.199270992187812e-06, |
|
"loss": 0.7484, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.164270434136114e-06, |
|
"loss": 0.7831, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.12934092885611e-06, |
|
"loss": 0.642, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.094482615497594e-06, |
|
"loss": 0.7385, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.059695632926733e-06, |
|
"loss": 0.7926, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.024980119725574e-06, |
|
"loss": 0.797, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.990336214191426e-06, |
|
"loss": 0.7751, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.955764054336325e-06, |
|
"loss": 0.8491, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.921263777886508e-06, |
|
"loss": 0.7464, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.886835522281837e-06, |
|
"loss": 0.8806, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.852479424675265e-06, |
|
"loss": 0.7217, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 6.818195621932277e-06, |
|
"loss": 0.681, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.783984250630387e-06, |
|
"loss": 0.9295, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.749845447058525e-06, |
|
"loss": 0.7381, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.71577934721655e-06, |
|
"loss": 0.6876, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.681786086814681e-06, |
|
"loss": 0.7515, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"eval_loss": 0.6668347120285034, |
|
"eval_runtime": 2.7968, |
|
"eval_samples_per_second": 3.218, |
|
"eval_steps_per_second": 0.715, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 6.647865801272968e-06, |
|
"loss": 0.7517, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.614018625720744e-06, |
|
"loss": 0.8264, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.580244694996094e-06, |
|
"loss": 0.6438, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.546544143645325e-06, |
|
"loss": 0.8277, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.512917105922414e-06, |
|
"loss": 0.8046, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 6.4793637157884555e-06, |
|
"loss": 0.6935, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.445884106911193e-06, |
|
"loss": 0.7991, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.412478412664422e-06, |
|
"loss": 0.7225, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.37914676612748e-06, |
|
"loss": 0.8946, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.345889300084717e-06, |
|
"loss": 0.7647, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 6.312706147025002e-06, |
|
"loss": 0.7763, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.279597439141102e-06, |
|
"loss": 0.7455, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.2465633083292535e-06, |
|
"loss": 0.7419, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.213603886188599e-06, |
|
"loss": 0.8038, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.180719304020649e-06, |
|
"loss": 0.6767, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.1479096928287794e-06, |
|
"loss": 0.7189, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 6.115175183317676e-06, |
|
"loss": 0.7532, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 6.082515905892886e-06, |
|
"loss": 0.768, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 6.049931990660218e-06, |
|
"loss": 0.7166, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 6.017423567425268e-06, |
|
"loss": 0.6287, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 5.98499076569288e-06, |
|
"loss": 0.6423, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 5.952633714666681e-06, |
|
"loss": 0.5794, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.920352543248475e-06, |
|
"loss": 0.7575, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.888147380037806e-06, |
|
"loss": 0.7018, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.856018353331436e-06, |
|
"loss": 0.7998, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.823965591122804e-06, |
|
"loss": 0.7905, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 5.791989221101527e-06, |
|
"loss": 0.79, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 5.760089370652908e-06, |
|
"loss": 0.6752, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 5.7282661668574076e-06, |
|
"loss": 0.7673, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 5.696519736490153e-06, |
|
"loss": 0.6974, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 5.6648502060204116e-06, |
|
"loss": 0.7497, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 5.633257701611133e-06, |
|
"loss": 0.6925, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.601742349118391e-06, |
|
"loss": 0.7451, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.570304274090909e-06, |
|
"loss": 0.8092, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.538943601769564e-06, |
|
"loss": 0.7423, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.5076604570868775e-06, |
|
"loss": 0.8235, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.476454964666522e-06, |
|
"loss": 0.716, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 5.4453272488228204e-06, |
|
"loss": 0.7266, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.41427743356028e-06, |
|
"loss": 0.7567, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.383305642573028e-06, |
|
"loss": 0.7154, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.352411999244394e-06, |
|
"loss": 0.8954, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.3215966266463914e-06, |
|
"loss": 0.7687, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 5.290859647539206e-06, |
|
"loss": 0.6535, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.260201184370741e-06, |
|
"loss": 0.6677, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.229621359276101e-06, |
|
"loss": 0.7645, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.199120294077122e-06, |
|
"loss": 0.7829, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.168698110281886e-06, |
|
"loss": 0.8645, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 5.138354929084224e-06, |
|
"loss": 0.6717, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 5.108090871363244e-06, |
|
"loss": 0.6911, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 5.077906057682863e-06, |
|
"loss": 0.7263, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 5.047800608291292e-06, |
|
"loss": 0.7412, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 5.017774643120568e-06, |
|
"loss": 0.7007, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 4.987828281786116e-06, |
|
"loss": 0.7248, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.957961643586215e-06, |
|
"loss": 0.7184, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.928174847501554e-06, |
|
"loss": 0.8302, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.898468012194751e-06, |
|
"loss": 0.7182, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.8688412560099065e-06, |
|
"loss": 0.8429, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.839294696972061e-06, |
|
"loss": 0.7214, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 4.809828452786794e-06, |
|
"loss": 0.6767, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.780442640839755e-06, |
|
"loss": 0.7173, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.751137378196138e-06, |
|
"loss": 0.6402, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.721912781600265e-06, |
|
"loss": 0.6689, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.692768967475108e-06, |
|
"loss": 0.7374, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 4.663706051921821e-06, |
|
"loss": 0.7785, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.6347241507192745e-06, |
|
"loss": 0.6506, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.605823379323598e-06, |
|
"loss": 0.6404, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.5770038528677424e-06, |
|
"loss": 0.6754, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.548265686160988e-06, |
|
"loss": 0.75, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 4.519608993688473e-06, |
|
"loss": 0.7818, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.49103388961081e-06, |
|
"loss": 0.8514, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.462540487763559e-06, |
|
"loss": 0.7249, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.4341289016568005e-06, |
|
"loss": 0.7647, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.4057992444746845e-06, |
|
"loss": 0.7499, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 4.377551629074997e-06, |
|
"loss": 0.8054, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.349386167988659e-06, |
|
"loss": 0.7767, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.321302973419328e-06, |
|
"loss": 0.6969, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.293302157242943e-06, |
|
"loss": 0.7513, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.2653838310072525e-06, |
|
"loss": 0.8205, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.237548105931399e-06, |
|
"loss": 0.7035, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 4.209795092905452e-06, |
|
"loss": 0.753, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.1821249024899925e-06, |
|
"loss": 0.7195, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.1545376449156455e-06, |
|
"loss": 0.6893, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.127033430082664e-06, |
|
"loss": 0.8706, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.099612367560465e-06, |
|
"loss": 0.6905, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 4.072274566587245e-06, |
|
"loss": 0.6468, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.045020136069463e-06, |
|
"loss": 0.8398, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 4.017849184581473e-06, |
|
"loss": 0.7046, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.990761820365088e-06, |
|
"loss": 0.7156, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.963758151329106e-06, |
|
"loss": 0.8128, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 3.936838285048914e-06, |
|
"loss": 0.7135, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.910002328766052e-06, |
|
"loss": 0.817, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.883250389387791e-06, |
|
"loss": 0.7037, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.856582573486689e-06, |
|
"loss": 0.7085, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.829998987300182e-06, |
|
"loss": 0.7067, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 3.8034997367301733e-06, |
|
"loss": 0.6197, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.777084927342575e-06, |
|
"loss": 0.7071, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.7507546643669178e-06, |
|
"loss": 0.7371, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.724509052695921e-06, |
|
"loss": 0.7281, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.6983481968850738e-06, |
|
"loss": 0.7821, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.672272201152219e-06, |
|
"loss": 0.7601, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"eval_loss": 0.6660656332969666, |
|
"eval_runtime": 2.7382, |
|
"eval_samples_per_second": 3.287, |
|
"eval_steps_per_second": 0.73, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 3.6462811693771413e-06, |
|
"loss": 0.6098, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.620375205101165e-06, |
|
"loss": 0.8173, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.594554411526713e-06, |
|
"loss": 0.7276, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.568818891516898e-06, |
|
"loss": 0.6734, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.54316874759516e-06, |
|
"loss": 0.695, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 3.5176040819448003e-06, |
|
"loss": 0.6768, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.4921249964086024e-06, |
|
"loss": 0.7032, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.4667315924884204e-06, |
|
"loss": 0.8767, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.441423971344779e-06, |
|
"loss": 0.6643, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.4162022337964646e-06, |
|
"loss": 0.8401, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 3.391066480320121e-06, |
|
"loss": 0.6597, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.3660168110498743e-06, |
|
"loss": 0.7097, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.341053325776894e-06, |
|
"loss": 0.6916, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.3161761239490276e-06, |
|
"loss": 0.7508, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.291385304670369e-06, |
|
"loss": 0.7143, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.2666809667009276e-06, |
|
"loss": 0.7629, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 3.242063208456159e-06, |
|
"loss": 0.711, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.2175321280066257e-06, |
|
"loss": 0.8207, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.1930878230775784e-06, |
|
"loss": 0.7556, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.168730391048602e-06, |
|
"loss": 0.7198, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.1444599289531666e-06, |
|
"loss": 0.729, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 3.1202765334782944e-06, |
|
"loss": 0.7024, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.0961803009641744e-06, |
|
"loss": 0.7667, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.0721713274037334e-06, |
|
"loss": 0.8017, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.048249708442302e-06, |
|
"loss": 0.7625, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.024415539377201e-06, |
|
"loss": 0.6569, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 3.000668915157381e-06, |
|
"loss": 0.7562, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.9770099303830345e-06, |
|
"loss": 0.7837, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.9534386793052175e-06, |
|
"loss": 0.6871, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.929955255825495e-06, |
|
"loss": 0.7128, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.9065597534955412e-06, |
|
"loss": 0.6519, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 2.883252265516756e-06, |
|
"loss": 0.6933, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.8600328847399493e-06, |
|
"loss": 0.7242, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.8369017036649097e-06, |
|
"loss": 0.7279, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.8138588144400706e-06, |
|
"loss": 0.8037, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.7909043088621328e-06, |
|
"loss": 0.8595, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.768038278375712e-06, |
|
"loss": 0.8126, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 2.7452608140729365e-06, |
|
"loss": 0.7693, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.7225720066931303e-06, |
|
"loss": 0.6596, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.699971946622434e-06, |
|
"loss": 0.7596, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.6774607238934352e-06, |
|
"loss": 0.7592, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.6550384281848167e-06, |
|
"loss": 0.7024, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 2.6327051488209992e-06, |
|
"loss": 0.8019, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.6104609747717943e-06, |
|
"loss": 0.7155, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.5883059946520292e-06, |
|
"loss": 0.7625, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.5662402967212174e-06, |
|
"loss": 0.7014, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.5442639688831925e-06, |
|
"loss": 0.7138, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 2.5223770986857633e-06, |
|
"loss": 0.6748, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.5005797733203636e-06, |
|
"loss": 0.7, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.478872079621686e-06, |
|
"loss": 0.7125, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.4572541040673884e-06, |
|
"loss": 0.7584, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.435725932777691e-06, |
|
"loss": 0.7339, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 2.414287651515065e-06, |
|
"loss": 0.628, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3929393456838867e-06, |
|
"loss": 0.6831, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3716811003300863e-06, |
|
"loss": 0.808, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.350513000140822e-06, |
|
"loss": 0.6957, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.329435129444138e-06, |
|
"loss": 0.7579, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.3084475722086356e-06, |
|
"loss": 0.709, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 2.2875504120431187e-06, |
|
"loss": 0.7982, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.2667437321962805e-06, |
|
"loss": 0.7208, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.2460276155563593e-06, |
|
"loss": 0.8464, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.2254021446508204e-06, |
|
"loss": 0.7632, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.2048674016460215e-06, |
|
"loss": 0.8267, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 2.1844234683468716e-06, |
|
"loss": 0.7138, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.1640704261965377e-06, |
|
"loss": 0.6624, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.143808356276092e-06, |
|
"loss": 0.7997, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.1236373393041826e-06, |
|
"loss": 0.8738, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.1035574556367566e-06, |
|
"loss": 0.7874, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 2.0835687852666875e-06, |
|
"loss": 0.6902, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.0636714078234892e-06, |
|
"loss": 0.7025, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.043865402572978e-06, |
|
"loss": 0.7576, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.0241508484169948e-06, |
|
"loss": 0.8543, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 2.0045278238930292e-06, |
|
"loss": 0.6842, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.9849964071739584e-06, |
|
"loss": 0.7801, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.9655566760677256e-06, |
|
"loss": 0.7252, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.946208708017011e-06, |
|
"loss": 0.762, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.926952580098945e-06, |
|
"loss": 0.7279, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.907788369024774e-06, |
|
"loss": 0.6979, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.888716151139594e-06, |
|
"loss": 0.6738, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 1.8697360024220151e-06, |
|
"loss": 0.9448, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.850847998483865e-06, |
|
"loss": 0.7003, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.8320522145698904e-06, |
|
"loss": 0.8084, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.8133487255574761e-06, |
|
"loss": 0.6528, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.7947376059563037e-06, |
|
"loss": 0.8335, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 1.776218929908091e-06, |
|
"loss": 0.7983, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.7577927711862925e-06, |
|
"loss": 0.7816, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.7394592031957895e-06, |
|
"loss": 0.7251, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.7212182989726045e-06, |
|
"loss": 0.7096, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.7030701311836207e-06, |
|
"loss": 0.7842, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.685014772126281e-06, |
|
"loss": 0.7881, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.6670522937282967e-06, |
|
"loss": 0.7318, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.649182767547372e-06, |
|
"loss": 0.7043, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.6314062647709266e-06, |
|
"loss": 0.6443, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.613722856215777e-06, |
|
"loss": 0.7562, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 1.5961326123278968e-06, |
|
"loss": 0.7012, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.578635603182102e-06, |
|
"loss": 0.8093, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.5612318984817964e-06, |
|
"loss": 0.7172, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.5439215675586787e-06, |
|
"loss": 0.6434, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.5267046793724703e-06, |
|
"loss": 0.8035, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.509581302510657e-06, |
|
"loss": 0.8322, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"eval_loss": 0.6654554605484009, |
|
"eval_runtime": 2.7545, |
|
"eval_samples_per_second": 3.267, |
|
"eval_steps_per_second": 0.726, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.492551505188175e-06, |
|
"loss": 0.785, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.4756153552471757e-06, |
|
"loss": 0.77, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.4587729201567563e-06, |
|
"loss": 0.8117, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.4420242670126678e-06, |
|
"loss": 0.8486, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.4253694625370546e-06, |
|
"loss": 0.7617, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 1.4088085730782047e-06, |
|
"loss": 0.671, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.3923416646102639e-06, |
|
"loss": 0.72, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.375968802732991e-06, |
|
"loss": 0.6535, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.359690052671475e-06, |
|
"loss": 0.7342, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.343505479275911e-06, |
|
"loss": 0.7025, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 1.327415147021302e-06, |
|
"loss": 0.7894, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.3114191200072262e-06, |
|
"loss": 0.7308, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.29551746195756e-06, |
|
"loss": 0.7034, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.2797102362202689e-06, |
|
"loss": 0.6894, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.2639975057670976e-06, |
|
"loss": 0.8328, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.2483793331933642e-06, |
|
"loss": 0.6765, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.2328557807176787e-06, |
|
"loss": 0.8112, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.2174269101817292e-06, |
|
"loss": 0.6945, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.2020927830499962e-06, |
|
"loss": 0.8214, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.18685346040954e-06, |
|
"loss": 0.8183, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.1717090029697426e-06, |
|
"loss": 0.7326, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 1.1566594710620715e-06, |
|
"loss": 0.7728, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.1417049246398276e-06, |
|
"loss": 0.8001, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.1268454232779196e-06, |
|
"loss": 0.7958, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.1120810261726288e-06, |
|
"loss": 0.7506, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.0974117921413563e-06, |
|
"loss": 0.8157, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.0828377796224004e-06, |
|
"loss": 0.7601, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.0683590466747328e-06, |
|
"loss": 0.6816, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.053975650977748e-06, |
|
"loss": 0.7532, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.0396876498310337e-06, |
|
"loss": 0.6189, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.0254951001541725e-06, |
|
"loss": 0.6646, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 1.0113980584864818e-06, |
|
"loss": 0.6928, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.97396580986798e-07, |
|
"loss": 0.8421, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.834907234332558e-07, |
|
"loss": 0.78, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.69680541223078e-07, |
|
"loss": 0.712, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.559660893723287e-07, |
|
"loss": 0.647, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 9.423474225157075e-07, |
|
"loss": 0.7082, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.28824594906344e-07, |
|
"loss": 0.903, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.153976604155623e-07, |
|
"loss": 0.7843, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 9.020666725326726e-07, |
|
"loss": 0.6783, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.88831684364766e-07, |
|
"loss": 0.7477, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.756927486364863e-07, |
|
"loss": 0.8019, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 8.626499176898367e-07, |
|
"loss": 0.8045, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.497032434839652e-07, |
|
"loss": 0.77, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.368527775949597e-07, |
|
"loss": 0.6848, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.24098571215634e-07, |
|
"loss": 0.651, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.114406751553283e-07, |
|
"loss": 0.7339, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.988791398397116e-07, |
|
"loss": 0.6913, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.864140153105825e-07, |
|
"loss": 0.7095, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.740453512256579e-07, |
|
"loss": 0.6741, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.617731968583813e-07, |
|
"loss": 0.5972, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.495976010977345e-07, |
|
"loss": 0.6605, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 7.375186124480321e-07, |
|
"loss": 0.7978, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.255362790287268e-07, |
|
"loss": 0.7945, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.136506485742239e-07, |
|
"loss": 0.742, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 7.018617684337037e-07, |
|
"loss": 0.7343, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.901696855709045e-07, |
|
"loss": 0.7478, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 6.785744465639509e-07, |
|
"loss": 0.7499, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.670760976051737e-07, |
|
"loss": 0.8013, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.556746845009148e-07, |
|
"loss": 0.7364, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.443702526713558e-07, |
|
"loss": 0.8204, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.331628471503176e-07, |
|
"loss": 0.7391, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.220525125851167e-07, |
|
"loss": 0.7273, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 6.110392932363429e-07, |
|
"loss": 0.8718, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 6.001232329777118e-07, |
|
"loss": 0.6898, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.893043752958932e-07, |
|
"loss": 0.6451, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.78582763290314e-07, |
|
"loss": 0.7174, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.679584396730053e-07, |
|
"loss": 0.6401, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 5.574314467684333e-07, |
|
"loss": 0.7426, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.470018265133132e-07, |
|
"loss": 0.7741, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.366696204564648e-07, |
|
"loss": 0.7337, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.264348697586269e-07, |
|
"loss": 0.6034, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.162976151923099e-07, |
|
"loss": 0.6708, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 5.06257897141621e-07, |
|
"loss": 0.7809, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.963157556021115e-07, |
|
"loss": 0.6825, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.864712301806046e-07, |
|
"loss": 0.5708, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.7672436009505995e-07, |
|
"loss": 0.6695, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.670751841743981e-07, |
|
"loss": 0.7113, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 4.5752374085835105e-07, |
|
"loss": 0.742, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.4807006819730933e-07, |
|
"loss": 0.741, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.387142038521808e-07, |
|
"loss": 0.6562, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.294561850942208e-07, |
|
"loss": 0.7414, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.2029604880489405e-07, |
|
"loss": 0.8105, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.1123383147573524e-07, |
|
"loss": 0.8203, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 4.02269569208194e-07, |
|
"loss": 0.6563, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.934032977134849e-07, |
|
"loss": 0.7323, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.846350523124681e-07, |
|
"loss": 0.7055, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.7596486793547993e-07, |
|
"loss": 0.6357, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.673927791222137e-07, |
|
"loss": 0.7673, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 3.589188200215754e-07, |
|
"loss": 0.8615, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.5054302439155583e-07, |
|
"loss": 0.7333, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.422654255990809e-07, |
|
"loss": 0.7218, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.3408605661988114e-07, |
|
"loss": 0.7571, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.260049500383805e-07, |
|
"loss": 0.7701, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 3.180221380475412e-07, |
|
"loss": 0.7273, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.101376524487498e-07, |
|
"loss": 0.7209, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 3.0235152465168113e-07, |
|
"loss": 0.7771, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.9466378567419295e-07, |
|
"loss": 0.708, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.8707446614217315e-07, |
|
"loss": 0.6228, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 2.7958359628944e-07, |
|
"loss": 0.7535, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"eval_loss": 0.6655915975570679, |
|
"eval_runtime": 2.7617, |
|
"eval_samples_per_second": 3.259, |
|
"eval_steps_per_second": 0.724, |
|
"step": 1500 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1575, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 2.1090369312946913e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |