{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.7118402282453637,
  "eval_steps": 100,
  "global_step": 900,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-05,
      "loss": 1.8229,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 4.9999950203346446e-05,
      "loss": 1.8352,
      "step": 2
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999980081358417e-05,
      "loss": 1.742,
      "step": 3
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999955183130829e-05,
      "loss": 1.6315,
      "step": 4
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999920325751068e-05,
      "loss": 1.6204,
      "step": 5
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999875509357998e-05,
      "loss": 1.5485,
      "step": 6
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999820734130155e-05,
      "loss": 1.4713,
      "step": 7
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999756000285748e-05,
      "loss": 1.4675,
      "step": 8
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9996813080826606e-05,
      "loss": 1.5091,
      "step": 9
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999596657818445e-05,
      "loss": 1.4009,
      "step": 10
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9995020498303275e-05,
      "loss": 1.2992,
      "step": 11
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.999397484495198e-05,
      "loss": 1.2111,
      "step": 12
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.99928296222962e-05,
      "loss": 1.2281,
      "step": 13
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.999158483489818e-05,
      "loss": 1.2099,
      "step": 14
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.99902404877168e-05,
      "loss": 1.1259,
      "step": 15
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998879658610761e-05,
      "loss": 1.022,
      "step": 16
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998725313582272e-05,
      "loss": 1.1276,
      "step": 17
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998561014301081e-05,
      "loss": 1.1276,
      "step": 18
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998386761421714e-05,
      "loss": 1.2404,
      "step": 19
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998202555638346e-05,
      "loss": 1.1622,
      "step": 20
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998008397684806e-05,
      "loss": 1.1335,
      "step": 21
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.997804288334565e-05,
      "loss": 1.2187,
      "step": 22
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9975902284007405e-05,
      "loss": 1.0678,
      "step": 23
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.997366218736091e-05,
      "loss": 1.0195,
      "step": 24
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.99713226023301e-05,
      "loss": 1.0138,
      "step": 25
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9968883538235264e-05,
      "loss": 1.0706,
      "step": 26
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996634500479297e-05,
      "loss": 0.9617,
      "step": 27
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996370701211607e-05,
      "loss": 1.034,
      "step": 28
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.996096957071361e-05,
      "loss": 0.9952,
      "step": 29
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9958132691490816e-05,
      "loss": 0.9598,
      "step": 30
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.995519638574907e-05,
      "loss": 1.0342,
      "step": 31
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9952160665185825e-05,
      "loss": 0.9147,
      "step": 32
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.994902554189458e-05,
      "loss": 0.924,
      "step": 33
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.994579102836482e-05,
      "loss": 0.9021,
      "step": 34
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.994245713748198e-05,
      "loss": 1.0732,
      "step": 35
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.993902388252739e-05,
      "loss": 0.926,
      "step": 36
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9935491277178236e-05,
      "loss": 0.9082,
      "step": 37
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.993185933550745e-05,
      "loss": 0.8735,
      "step": 38
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.992812807198372e-05,
      "loss": 1.0212,
      "step": 39
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9924297501471414e-05,
      "loss": 0.9495,
      "step": 40
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9920367639230483e-05,
      "loss": 0.9101,
      "step": 41
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991633850091645e-05,
      "loss": 1.0533,
      "step": 42
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991221010258034e-05,
      "loss": 0.8759,
      "step": 43
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.990798246066856e-05,
      "loss": 0.969,
      "step": 44
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9903655592022916e-05,
      "loss": 0.8463,
      "step": 45
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.9899229513880494e-05,
      "loss": 0.844,
      "step": 46
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.989470424387361e-05,
      "loss": 0.8386,
      "step": 47
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.989007980002973e-05,
      "loss": 0.93,
      "step": 48
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.988535620077138e-05,
      "loss": 0.8551,
      "step": 49
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.988053346491613e-05,
      "loss": 0.8633,
      "step": 50
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.987561161167648e-05,
      "loss": 0.9056,
      "step": 51
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9870590660659755e-05,
      "loss": 0.9366,
      "step": 52
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.986547063186808e-05,
      "loss": 1.0469,
      "step": 53
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.98602515456983e-05,
      "loss": 0.8723,
      "step": 54
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.985493342294184e-05,
      "loss": 0.9484,
      "step": 55
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9849516284784676e-05,
      "loss": 0.8346,
      "step": 56
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.984400015280724e-05,
      "loss": 0.789,
      "step": 57
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.983838504898433e-05,
      "loss": 0.8986,
      "step": 58
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9832670995685006e-05,
      "loss": 0.8799,
      "step": 59
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9826858015672536e-05,
      "loss": 0.9028,
      "step": 60
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.982094613210428e-05,
      "loss": 0.8908,
      "step": 61
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9814935368531586e-05,
      "loss": 0.8273,
      "step": 62
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.980882574889973e-05,
      "loss": 0.784,
      "step": 63
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.980261729754781e-05,
      "loss": 0.9336,
      "step": 64
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9796310039208623e-05,
      "loss": 0.8943,
      "step": 65
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.978990399900861e-05,
      "loss": 1.1,
      "step": 66
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9783399202467714e-05,
      "loss": 0.877,
      "step": 67
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9776795675499296e-05,
      "loss": 0.8291,
      "step": 68
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9770093444410046e-05,
      "loss": 0.7997,
      "step": 69
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.9763292535899866e-05,
      "loss": 0.7911,
      "step": 70
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9756392977061736e-05,
      "loss": 0.8996,
      "step": 71
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.974939479538166e-05,
      "loss": 0.8631,
      "step": 72
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.974229801873854e-05,
      "loss": 0.8303,
      "step": 73
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.9735102675404004e-05,
      "loss": 0.8708,
      "step": 74
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.972780879404239e-05,
      "loss": 0.998,
      "step": 75
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.972041640371056e-05,
      "loss": 1.1049,
      "step": 76
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.971292553385783e-05,
      "loss": 0.774,
      "step": 77
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.970533621432581e-05,
      "loss": 0.8142,
      "step": 78
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.969764847534832e-05,
      "loss": 0.9968,
      "step": 79
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9689862347551255e-05,
      "loss": 0.9622,
      "step": 80
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9681977861952464e-05,
      "loss": 0.8342,
      "step": 81
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.967399504996162e-05,
      "loss": 0.9,
      "step": 82
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.966591394338012e-05,
      "loss": 0.8515,
      "step": 83
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.965773457440092e-05,
      "loss": 0.7937,
      "step": 84
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.964945697560844e-05,
      "loss": 0.9577,
      "step": 85
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9641081179978424e-05,
      "loss": 0.8233,
      "step": 86
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.963260722087779e-05,
      "loss": 0.9769,
      "step": 87
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.9624035132064526e-05,
      "loss": 0.8927,
      "step": 88
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.961536494768754e-05,
      "loss": 0.9383,
      "step": 89
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.960659670228652e-05,
      "loss": 0.9177,
      "step": 90
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.959773043079181e-05,
      "loss": 0.8026,
      "step": 91
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.958876616852427e-05,
      "loss": 0.9647,
      "step": 92
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.9579703951195113e-05,
      "loss": 0.7545,
      "step": 93
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.957054381490579e-05,
      "loss": 0.7836,
      "step": 94
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.9561285796147824e-05,
      "loss": 0.8803,
      "step": 95
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.955192993180269e-05,
      "loss": 0.8988,
      "step": 96
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.9542476259141656e-05,
      "loss": 0.8418,
      "step": 97
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.953292481582561e-05,
      "loss": 0.9004,
      "step": 98
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9523275639904944e-05,
      "loss": 0.9159,
      "step": 99
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.951352876981939e-05,
      "loss": 0.7605,
      "step": 100
    },
    {
      "epoch": 0.19,
      "eval_loss": 0.7751120924949646,
      "eval_runtime": 2.7398,
      "eval_samples_per_second": 3.285,
      "eval_steps_per_second": 0.73,
      "step": 100
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9503684244397894e-05,
      "loss": 0.8228,
      "step": 101
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.949374210285838e-05,
      "loss": 0.7176,
      "step": 102
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.948370238480769e-05,
      "loss": 0.9403,
      "step": 103
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9473565130241376e-05,
      "loss": 0.7711,
      "step": 104
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.946333037954354e-05,
      "loss": 0.8349,
      "step": 105
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.945299817348669e-05,
      "loss": 0.7899,
      "step": 106
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.944256855323157e-05,
      "loss": 0.913,
      "step": 107
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9432041560326995e-05,
      "loss": 0.9574,
      "step": 108
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.942141723670969e-05,
      "loss": 0.8848,
      "step": 109
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.941069562470411e-05,
      "loss": 0.9267,
      "step": 110
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.939987676702229e-05,
      "loss": 0.7779,
      "step": 111
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.938896070676366e-05,
      "loss": 0.9766,
      "step": 112
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9377947487414886e-05,
      "loss": 0.8737,
      "step": 113
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.936683715284968e-05,
      "loss": 0.7962,
      "step": 114
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.935562974732865e-05,
      "loss": 0.8711,
      "step": 115
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.934432531549909e-05,
      "loss": 0.8212,
      "step": 116
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.933292390239483e-05,
      "loss": 0.8364,
      "step": 117
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9321425553436054e-05,
      "loss": 0.8737,
      "step": 118
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9309830314429095e-05,
      "loss": 0.8382,
      "step": 119
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.92981382315663e-05,
      "loss": 0.9143,
      "step": 120
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9286349351425774e-05,
      "loss": 0.9366,
      "step": 121
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.927446372097127e-05,
      "loss": 0.7771,
      "step": 122
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9262481387551976e-05,
      "loss": 0.8218,
      "step": 123
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.925040239890227e-05,
      "loss": 0.9309,
      "step": 124
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.923822680314162e-05,
      "loss": 0.8345,
      "step": 125
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.922595464877435e-05,
      "loss": 0.8479,
      "step": 126
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.921358598468942e-05,
      "loss": 0.8879,
      "step": 127
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.920112086016029e-05,
      "loss": 0.8235,
      "step": 128
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.918855932484468e-05,
      "loss": 0.8693,
      "step": 129
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.917590142878437e-05,
      "loss": 0.8928,
      "step": 130
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.916314722240504e-05,
      "loss": 0.9152,
      "step": 131
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.915029675651604e-05,
      "loss": 0.9386,
      "step": 132
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.913735008231016e-05,
      "loss": 0.8565,
      "step": 133
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.9124307251363525e-05,
      "loss": 0.8073,
      "step": 134
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.911116831563524e-05,
      "loss": 0.8975,
      "step": 135
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.909793332746733e-05,
      "loss": 0.7949,
      "step": 136
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.908460233958444e-05,
      "loss": 0.8059,
      "step": 137
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.907117540509366e-05,
      "loss": 0.8424,
      "step": 138
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.9057652577484293e-05,
      "loss": 0.7911,
      "step": 139
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9044033910627676e-05,
      "loss": 0.858,
      "step": 140
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9030319458776926e-05,
      "loss": 0.977,
      "step": 141
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9016509276566746e-05,
      "loss": 0.8803,
      "step": 142
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9002603419013205e-05,
      "loss": 0.9155,
      "step": 143
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.8988601941513526e-05,
      "loss": 0.6968,
      "step": 144
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.897450489984583e-05,
      "loss": 0.8425,
      "step": 145
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.896031235016897e-05,
      "loss": 0.9646,
      "step": 146
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.894602434902226e-05,
      "loss": 0.8387,
      "step": 147
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.893164095332527e-05,
      "loss": 0.9245,
      "step": 148
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.89171622203776e-05,
      "loss": 0.861,
      "step": 149
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.890258820785865e-05,
      "loss": 0.7925,
      "step": 150
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8887918973827375e-05,
      "loss": 0.8197,
      "step": 151
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.887315457672208e-05,
      "loss": 0.8404,
      "step": 152
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8858295075360175e-05,
      "loss": 0.8094,
      "step": 153
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.884334052893793e-05,
      "loss": 0.8282,
      "step": 154
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.882829099703026e-05,
      "loss": 0.7838,
      "step": 155
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.881314653959046e-05,
      "loss": 0.8562,
      "step": 156
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8797907216950015e-05,
      "loss": 0.818,
      "step": 157
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.878257308981827e-05,
      "loss": 0.7874,
      "step": 158
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8767144219282325e-05,
      "loss": 0.9106,
      "step": 159
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.875162066680664e-05,
      "loss": 0.9026,
      "step": 160
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8736002494232904e-05,
      "loss": 0.7882,
      "step": 161
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.872028976377974e-05,
      "loss": 1.0285,
      "step": 162
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.870448253804244e-05,
      "loss": 0.8034,
      "step": 163
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.868858087999279e-05,
      "loss": 0.7977,
      "step": 164
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.86725848529787e-05,
      "loss": 0.885,
      "step": 165
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.86564945207241e-05,
      "loss": 0.804,
      "step": 166
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.864030994732852e-05,
      "loss": 0.9618,
      "step": 167
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.862403119726702e-05,
      "loss": 0.8042,
      "step": 168
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.860765833538974e-05,
      "loss": 1.0271,
      "step": 169
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.85911914269218e-05,
      "loss": 0.799,
      "step": 170
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8574630537462954e-05,
      "loss": 0.7454,
      "step": 171
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.855797573298734e-05,
      "loss": 0.7428,
      "step": 172
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8541227079843246e-05,
      "loss": 0.9079,
      "step": 173
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.852438464475283e-05,
      "loss": 0.7964,
      "step": 174
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8507448494811834e-05,
      "loss": 0.9262,
      "step": 175
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.849041869748935e-05,
      "loss": 0.7817,
      "step": 176
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.847329532062753e-05,
      "loss": 0.759,
      "step": 177
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.845607843244132e-05,
      "loss": 0.9029,
      "step": 178
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.843876810151821e-05,
      "loss": 0.7191,
      "step": 179
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.8421364396817905e-05,
      "loss": 0.8861,
      "step": 180
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.840386738767211e-05,
      "loss": 0.982,
      "step": 181
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.838627714378423e-05,
      "loss": 0.8453,
      "step": 182
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8368593735229074e-05,
      "loss": 0.9402,
      "step": 183
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8350817232452625e-05,
      "loss": 0.7882,
      "step": 184
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8332947706271706e-05,
      "loss": 0.8458,
      "step": 185
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.831498522787372e-05,
      "loss": 0.8955,
      "step": 186
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.829692986881638e-05,
      "loss": 0.7777,
      "step": 187
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.8278781701027395e-05,
      "loss": 0.7341,
      "step": 188
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.826054079680422e-05,
      "loss": 0.8234,
      "step": 189
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.824220722881371e-05,
      "loss": 0.7693,
      "step": 190
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.8223781070091914e-05,
      "loss": 0.8162,
      "step": 191
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.8205262394043705e-05,
      "loss": 0.8983,
      "step": 192
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.818665127444253e-05,
      "loss": 0.9481,
      "step": 193
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.816794778543011e-05,
      "loss": 0.9685,
      "step": 194
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.814915200151614e-05,
      "loss": 0.9092,
      "step": 195
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.813026399757799e-05,
      "loss": 0.7715,
      "step": 196
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.811128384886041e-05,
      "loss": 0.8279,
      "step": 197
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.809221163097523e-05,
      "loss": 0.7468,
      "step": 198
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.807304741990106e-05,
      "loss": 0.7938,
      "step": 199
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.805379129198299e-05,
      "loss": 0.9049,
      "step": 200
    },
    {
      "epoch": 0.38,
      "eval_loss": 0.7435702681541443,
      "eval_runtime": 2.8319,
      "eval_samples_per_second": 3.178,
      "eval_steps_per_second": 0.706,
      "step": 200
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.803444332393227e-05,
      "loss": 0.8812,
      "step": 201
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.801500359282605e-05,
      "loss": 0.84,
      "step": 202
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.7995472176106974e-05,
      "loss": 0.8609,
      "step": 203
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.797584915158301e-05,
      "loss": 0.8207,
      "step": 204
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.795613459742702e-05,
      "loss": 0.7714,
      "step": 205
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.7936328592176516e-05,
      "loss": 0.7537,
      "step": 206
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.7916431214733314e-05,
      "loss": 0.8651,
      "step": 207
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.7896442544363246e-05,
      "loss": 0.8055,
      "step": 208
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.787636266069582e-05,
      "loss": 0.8936,
      "step": 209
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.785619164372391e-05,
      "loss": 0.701,
      "step": 210
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.783592957380346e-05,
      "loss": 0.7145,
      "step": 211
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.781557653165313e-05,
      "loss": 0.896,
      "step": 212
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.779513259835399e-05,
      "loss": 0.7178,
      "step": 213
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.777459785534918e-05,
      "loss": 0.8032,
      "step": 214
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7753972384443644e-05,
      "loss": 0.8792,
      "step": 215
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.773325626780373e-05,
      "loss": 0.7246,
      "step": 216
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.771244958795688e-05,
      "loss": 0.8574,
      "step": 217
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7691552427791365e-05,
      "loss": 0.7291,
      "step": 218
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7670564870555864e-05,
      "loss": 0.8259,
      "step": 219
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.7649486999859185e-05,
      "loss": 0.7818,
      "step": 220
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.762831889966992e-05,
      "loss": 0.8693,
      "step": 221
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.760706065431612e-05,
      "loss": 0.8281,
      "step": 222
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.758571234848494e-05,
      "loss": 0.8359,
      "step": 223
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.7564274067222314e-05,
      "loss": 0.8119,
      "step": 224
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.754274589593262e-05,
      "loss": 0.8577,
      "step": 225
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.752112792037832e-05,
      "loss": 0.7509,
      "step": 226
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.7499420226679646e-05,
      "loss": 0.7648,
      "step": 227
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.747762290131424e-05,
      "loss": 0.7964,
      "step": 228
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7455736031116804e-05,
      "loss": 0.8229,
      "step": 229
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7433759703278785e-05,
      "loss": 0.7705,
      "step": 230
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.741169400534797e-05,
      "loss": 0.8954,
      "step": 231
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7389539025228217e-05,
      "loss": 0.8405,
      "step": 232
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7367294851179e-05,
      "loss": 0.7539,
      "step": 233
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.734496157181518e-05,
      "loss": 0.7844,
      "step": 234
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.732253927610657e-05,
      "loss": 0.7417,
      "step": 235
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7300028053377564e-05,
      "loss": 0.8239,
      "step": 236
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7277427993306867e-05,
      "loss": 0.7526,
      "step": 237
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7254739185927066e-05,
      "loss": 0.8431,
      "step": 238
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7231961721624296e-05,
      "loss": 0.737,
      "step": 239
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.720909569113787e-05,
      "loss": 0.751,
      "step": 240
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.718614118555993e-05,
      "loss": 0.8756,
      "step": 241
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.71630982963351e-05,
      "loss": 0.7679,
      "step": 242
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.713996711526005e-05,
      "loss": 0.7593,
      "step": 243
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.7116747734483245e-05,
      "loss": 0.8194,
      "step": 244
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.7093440246504465e-05,
      "loss": 0.9187,
      "step": 245
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.70700447441745e-05,
      "loss": 0.7543,
      "step": 246
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.704656132069478e-05,
      "loss": 0.6961,
      "step": 247
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.7022990069616973e-05,
      "loss": 0.8313,
      "step": 248
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.699933108484262e-05,
      "loss": 0.8524,
      "step": 249
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.69755844606228e-05,
      "loss": 0.7614,
      "step": 250
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.695175029155771e-05,
      "loss": 0.7678,
      "step": 251
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.692782867259627e-05,
      "loss": 0.9097,
      "step": 252
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.690381969903583e-05,
      "loss": 0.8819,
      "step": 253
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.6879723466521706e-05,
      "loss": 0.883,
      "step": 254
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.685554007104684e-05,
      "loss": 0.7365,
      "step": 255
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.683126960895141e-05,
      "loss": 0.8576,
      "step": 256
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.680691217692242e-05,
      "loss": 0.8227,
      "step": 257
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.678246787199338e-05,
      "loss": 0.7949,
      "step": 258
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.6757936791543846e-05,
      "loss": 0.8524,
      "step": 259
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.6733319033299075e-05,
      "loss": 0.7235,
      "step": 260
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.670861469532963e-05,
      "loss": 0.8348,
      "step": 261
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.668382387605098e-05,
      "loss": 0.7539,
      "step": 262
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.665894667422311e-05,
      "loss": 0.7968,
      "step": 263
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.663398318895013e-05,
      "loss": 1.0207,
      "step": 264
    },
    {
      "epoch": 0.5,
      "learning_rate": 4.660893351967988e-05,
      "loss": 0.7758,
      "step": 265
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.658379776620354e-05,
      "loss": 0.8379,
      "step": 266
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.6558576028655226e-05,
      "loss": 0.8968,
      "step": 267
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.6533268407511586e-05,
      "loss": 0.8639,
      "step": 268
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.650787500359141e-05,
      "loss": 0.713,
      "step": 269
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.6482395918055197e-05,
      "loss": 0.9117,
      "step": 270
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.6456831252404844e-05,
      "loss": 0.7259,
      "step": 271
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.64311811084831e-05,
      "loss": 0.7107,
      "step": 272
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.64054455884733e-05,
      "loss": 0.8522,
      "step": 273
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.637962479489883e-05,
      "loss": 0.764,
      "step": 274
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.635371883062286e-05,
      "loss": 0.8385,
      "step": 275
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.6327727798847785e-05,
      "loss": 0.9269,
      "step": 276
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.630165180311493e-05,
      "loss": 0.7428,
      "step": 277
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.6275490947304084e-05,
      "loss": 0.8363,
      "step": 278
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.6249245335633086e-05,
      "loss": 0.8526,
      "step": 279
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.6222915072657424e-05,
      "loss": 0.8759,
      "step": 280
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.6196500263269824e-05,
      "loss": 0.7692,
      "step": 281
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.617000101269982e-05,
      "loss": 0.8109,
      "step": 282
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.614341742651332e-05,
      "loss": 0.6978,
      "step": 283
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.6116749610612214e-05,
      "loss": 0.7617,
      "step": 284
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.6089997671233955e-05,
      "loss": 0.8812,
      "step": 285
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.606316171495109e-05,
      "loss": 0.7013,
      "step": 286
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.60362418486709e-05,
      "loss": 0.8073,
      "step": 287
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.6009238179634916e-05,
      "loss": 0.8615,
      "step": 288
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.598215081541853e-05,
      "loss": 0.9124,
      "step": 289
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.595497986393055e-05,
      "loss": 0.8016,
      "step": 290
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.592772543341276e-05,
      "loss": 0.8414,
      "step": 291
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.590038763243953e-05,
      "loss": 0.7662,
      "step": 292
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.587296656991734e-05,
      "loss": 0.7667,
      "step": 293
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.584546235508436e-05,
      "loss": 0.8404,
      "step": 294
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.581787509751001e-05,
      "loss": 0.9285,
      "step": 295
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.5790204907094554e-05,
      "loss": 0.863,
      "step": 296
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.5762451894068606e-05,
      "loss": 0.941,
      "step": 297
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.573461616899275e-05,
      "loss": 0.9033,
      "step": 298
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.570669784275706e-05,
      "loss": 0.8648,
      "step": 299
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.567869702658067e-05,
      "loss": 0.8397,
      "step": 300
    },
    {
      "epoch": 0.57,
      "eval_loss": 0.7216992974281311,
      "eval_runtime": 2.8122,
      "eval_samples_per_second": 3.2,
      "eval_steps_per_second": 0.711,
      "step": 300
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.565061383201135e-05,
      "loss": 0.7869,
      "step": 301
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.562244837092501e-05,
      "loss": 0.8699,
      "step": 302
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.559420075552532e-05,
      "loss": 0.7533,
      "step": 303
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.5565871098343206e-05,
      "loss": 0.7445,
      "step": 304
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.553745951223645e-05,
      "loss": 0.8536,
      "step": 305
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.55089661103892e-05,
      "loss": 0.8168,
      "step": 306
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.548039100631153e-05,
      "loss": 0.7869,
      "step": 307
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.5451734313839026e-05,
      "loss": 0.7409,
      "step": 308
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.5422996147132257e-05,
      "loss": 0.8334,
      "step": 309
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.53941766206764e-05,
      "loss": 0.7438,
      "step": 310
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.536527584928073e-05,
      "loss": 0.8303,
      "step": 311
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.533629394807819e-05,
      "loss": 0.7954,
      "step": 312
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.5307231032524897e-05,
      "loss": 0.694,
      "step": 313
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.5278087218399735e-05,
      "loss": 0.8333,
      "step": 314
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.524886262180387e-05,
      "loss": 0.8683,
      "step": 315
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.5219557359160244e-05,
      "loss": 0.8963,
      "step": 316
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.519017154721321e-05,
      "loss": 0.8737,
      "step": 317
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.516070530302795e-05,
      "loss": 0.8319,
      "step": 318
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.51311587439901e-05,
      "loss": 0.7937,
      "step": 319
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.510153198780524e-05,
      "loss": 0.7702,
      "step": 320
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.507182515249845e-05,
      "loss": 0.9189,
      "step": 321
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.504203835641379e-05,
      "loss": 0.8915,
      "step": 322
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.501217171821389e-05,
      "loss": 0.8164,
      "step": 323
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.4982225356879436e-05,
      "loss": 0.8632,
      "step": 324
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.4952199391708716e-05,
      "loss": 0.7958,
      "step": 325
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.492209394231714e-05,
      "loss": 0.7706,
      "step": 326
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.489190912863675e-05,
      "loss": 0.8457,
      "step": 327
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.486164507091578e-05,
      "loss": 0.8253,
      "step": 328
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.483130188971812e-05,
      "loss": 0.8703,
      "step": 329
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.4800879705922885e-05,
      "loss": 0.7406,
      "step": 330
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.4770378640723904e-05,
      "loss": 0.8331,
      "step": 331
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.4739798815629264e-05,
      "loss": 0.8479,
      "step": 332
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.47091403524608e-05,
      "loss": 0.8482,
      "step": 333
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.4678403373353614e-05,
      "loss": 0.848,
      "step": 334
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.464758800075561e-05,
      "loss": 0.8397,
      "step": 335
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.461669435742698e-05,
      "loss": 0.7962,
      "step": 336
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.458572256643973e-05,
      "loss": 0.8888,
      "step": 337
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.455467275117718e-05,
      "loss": 0.7731,
      "step": 338
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.452354503533348e-05,
      "loss": 0.9121,
      "step": 339
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.449233954291313e-05,
      "loss": 0.7023,
      "step": 340
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.446105639823044e-05,
      "loss": 0.7888,
      "step": 341
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.442969572590909e-05,
      "loss": 0.7791,
      "step": 342
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.439825765088161e-05,
      "loss": 0.8437,
      "step": 343
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.436674229838887e-05,
      "loss": 1.0018,
      "step": 344
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.433514979397959e-05,
      "loss": 0.8058,
      "step": 345
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.4303480263509855e-05,
      "loss": 0.6082,
      "step": 346
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.42717338331426e-05,
      "loss": 0.8858,
      "step": 347
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.4239910629347096e-05,
      "loss": 0.8144,
      "step": 348
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.420801077889848e-05,
      "loss": 0.8518,
      "step": 349
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.41760344088772e-05,
      "loss": 0.7277,
      "step": 350
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.414398164666856e-05,
      "loss": 0.9246,
      "step": 351
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.41118526199622e-05,
      "loss": 0.8504,
      "step": 352
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.407964745675153e-05,
      "loss": 0.7957,
      "step": 353
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.404736628533332e-05,
      "loss": 0.7425,
      "step": 354
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.401500923430712e-05,
      "loss": 0.9097,
      "step": 355
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.398257643257474e-05,
      "loss": 0.7344,
      "step": 356
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.395006800933978e-05,
      "loss": 0.8781,
      "step": 357
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.391748409410712e-05,
      "loss": 0.8264,
      "step": 358
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.388482481668232e-05,
      "loss": 0.7782,
      "step": 359
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.385209030717123e-05,
      "loss": 0.9151,
      "step": 360
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.3819280695979354e-05,
      "loss": 0.7878,
      "step": 361
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.37863961138114e-05,
      "loss": 0.8142,
      "step": 362
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.3753436691670746e-05,
      "loss": 0.707,
      "step": 363
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.372040256085891e-05,
      "loss": 0.7494,
      "step": 364
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.368729385297501e-05,
      "loss": 0.9402,
      "step": 365
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.365411069991528e-05,
      "loss": 0.8291,
      "step": 366
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.3620853233872525e-05,
      "loss": 0.9221,
      "step": 367
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.3587521587335586e-05,
      "loss": 0.775,
      "step": 368
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.355411589308881e-05,
      "loss": 0.8099,
      "step": 369
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.352063628421155e-05,
      "loss": 0.8428,
      "step": 370
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.3487082894077594e-05,
      "loss": 0.8306,
      "step": 371
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.345345585635468e-05,
      "loss": 0.9234,
      "step": 372
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.3419755305003904e-05,
      "loss": 0.8925,
      "step": 373
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.3385981374279264e-05,
      "loss": 0.8707,
      "step": 374
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.3352134198727036e-05,
      "loss": 0.7951,
      "step": 375
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.331821391318532e-05,
      "loss": 0.7105,
      "step": 376
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.328422065278346e-05,
      "loss": 0.6983,
      "step": 377
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.325015455294148e-05,
      "loss": 0.7357,
      "step": 378
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.321601574936962e-05,
      "loss": 0.8573,
      "step": 379
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.3181804378067726e-05,
      "loss": 0.9227,
      "step": 380
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.314752057532475e-05,
      "loss": 0.791,
      "step": 381
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.3113164477718174e-05,
      "loss": 0.7773,
      "step": 382
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.3078736222113496e-05,
      "loss": 0.9276,
      "step": 383
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.304423594566368e-05,
      "loss": 0.7723,
      "step": 384
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.300966378580858e-05,
      "loss": 0.8327,
      "step": 385
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.297501988027442e-05,
      "loss": 0.7469,
      "step": 386
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.294030436707327e-05,
      "loss": 0.6427,
      "step": 387
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.290551738450241e-05,
      "loss": 0.8161,
      "step": 388
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.2870659071143894e-05,
      "loss": 0.7147,
      "step": 389
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.2835729565863884e-05,
      "loss": 0.8505,
      "step": 390
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.2800729007812196e-05,
      "loss": 0.8531,
      "step": 391
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.2765657536421675e-05,
      "loss": 0.7539,
      "step": 392
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.273051529140767e-05,
      "loss": 0.8358,
      "step": 393
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.269530241276749e-05,
      "loss": 0.7724,
      "step": 394
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.2660019040779805e-05,
      "loss": 0.8011,
      "step": 395
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.262466531600413e-05,
      "loss": 0.7952,
      "step": 396
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.258924137928023e-05,
      "loss": 0.7196,
      "step": 397
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.2553747371727606e-05,
      "loss": 0.7226,
      "step": 398
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.251818343474486e-05,
      "loss": 0.7577,
      "step": 399
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.24825497100092e-05,
      "loss": 0.7186,
      "step": 400
    },
    {
      "epoch": 0.76,
      "eval_loss": 0.7108685374259949,
      "eval_runtime": 2.7837,
      "eval_samples_per_second": 3.233,
      "eval_steps_per_second": 0.718,
      "step": 400
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.244684633947586e-05,
      "loss": 0.8418,
      "step": 401
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.241107346537748e-05,
      "loss": 0.8305,
      "step": 402
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.237523123022365e-05,
      "loss": 1.0274,
      "step": 403
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.233931977680021e-05,
      "loss": 0.8431,
      "step": 404
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.230333924816881e-05,
      "loss": 0.8005,
      "step": 405
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.22672897876662e-05,
      "loss": 0.8787,
      "step": 406
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.223117153890381e-05,
      "loss": 0.9091,
      "step": 407
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.2194984645767065e-05,
      "loss": 0.7993,
      "step": 408
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.215872925241486e-05,
      "loss": 0.9422,
      "step": 409
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.212240550327898e-05,
      "loss": 0.7923,
      "step": 410
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.2086013543063504e-05,
      "loss": 0.8263,
      "step": 411
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.204955351674428e-05,
      "loss": 0.7529,
      "step": 412
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.201302556956827e-05,
      "loss": 1.0105,
      "step": 413
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.197642984705305e-05,
      "loss": 0.7613,
      "step": 414
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.1939766494986174e-05,
      "loss": 0.7851,
      "step": 415
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.190303565942463e-05,
      "loss": 0.687,
      "step": 416
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.1866237486694225e-05,
      "loss": 0.7759,
      "step": 417
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.182937212338903e-05,
      "loss": 0.8702,
      "step": 418
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.1792439716370786e-05,
      "loss": 0.8531,
      "step": 419
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.175544041276831e-05,
      "loss": 0.8694,
      "step": 420
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.1718374359976926e-05,
      "loss": 0.7487,
      "step": 421
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.1681241705657865e-05,
      "loss": 0.7525,
      "step": 422
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.164404259773768e-05,
      "loss": 0.8515,
      "step": 423
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.160677718440765e-05,
      "loss": 0.7136,
      "step": 424
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.156944561412323e-05,
      "loss": 0.9546,
      "step": 425
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.1532048035603376e-05,
      "loss": 0.6966,
      "step": 426
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.149458459783004e-05,
      "loss": 0.8917,
      "step": 427
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.1457055450047534e-05,
      "loss": 0.7036,
      "step": 428
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.141946074176192e-05,
      "loss": 0.7952,
      "step": 429
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.138180062274045e-05,
      "loss": 0.7388,
      "step": 430
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.134407524301098e-05,
      "loss": 0.8248,
      "step": 431
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.13062847528613e-05,
      "loss": 0.698,
      "step": 432
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.126842930283862e-05,
      "loss": 0.8154,
      "step": 433
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.123050904374891e-05,
      "loss": 0.8472,
      "step": 434
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.1192524126656337e-05,
      "loss": 0.946,
      "step": 435
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.1154474702882626e-05,
      "loss": 0.8445,
      "step": 436
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.111636092400652e-05,
      "loss": 0.7753,
      "step": 437
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.107818294186308e-05,
      "loss": 0.8108,
      "step": 438
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.10399409085432e-05,
      "loss": 0.7051,
      "step": 439
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.100163497639288e-05,
      "loss": 0.9362,
      "step": 440
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.096326529801271e-05,
      "loss": 0.7638,
      "step": 441
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.09248320262572e-05,
      "loss": 0.7786,
      "step": 442
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.088633531423423e-05,
      "loss": 0.8217,
      "step": 443
    },
    {
      "epoch": 0.84,
      "learning_rate": 4.084777531530439e-05,
      "loss": 0.8158,
      "step": 444
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.08091521830804e-05,
      "loss": 0.7217,
      "step": 445
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.077046607142646e-05,
      "loss": 0.7779,
      "step": 446
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.07317171344577e-05,
      "loss": 0.7776,
      "step": 447
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.069290552653951e-05,
      "loss": 0.8132,
      "step": 448
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.065403140228693e-05,
      "loss": 0.768,
      "step": 449
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.0615094916564087e-05,
      "loss": 0.7917,
      "step": 450
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.057609622448349e-05,
      "loss": 0.7716,
      "step": 451
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.053703548140551e-05,
      "loss": 0.7192,
      "step": 452
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.049791284293768e-05,
      "loss": 0.7797,
      "step": 453
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.045872846493412e-05,
      "loss": 0.8675,
      "step": 454
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.0419482503494896e-05,
      "loss": 0.7104,
      "step": 455
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.0380175114965426e-05,
      "loss": 0.8268,
      "step": 456
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.034080645593581e-05,
      "loss": 0.8496,
      "step": 457
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.030137668324024e-05,
      "loss": 0.7655,
      "step": 458
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.026188595395639e-05,
      "loss": 0.783,
      "step": 459
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.0222334425404757e-05,
      "loss": 0.7517,
      "step": 460
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.018272225514802e-05,
      "loss": 0.7921,
      "step": 461
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.014304960099048e-05,
      "loss": 0.7849,
      "step": 462
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.010331662097736e-05,
      "loss": 0.781,
      "step": 463
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.0063523473394215e-05,
      "loss": 0.8017,
      "step": 464
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.00236703167663e-05,
      "loss": 0.7774,
      "step": 465
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.998375730985791e-05,
      "loss": 0.8126,
      "step": 466
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.994378461167178e-05,
      "loss": 0.7426,
      "step": 467
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.9903752381448465e-05,
      "loss": 0.7115,
      "step": 468
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.986366077866562e-05,
      "loss": 0.8825,
      "step": 469
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.9823509963037464e-05,
      "loss": 0.8392,
      "step": 470
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.978330009451411e-05,
      "loss": 0.9632,
      "step": 471
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.97430313332809e-05,
      "loss": 0.7299,
      "step": 472
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.97027038397578e-05,
      "loss": 0.7646,
      "step": 473
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.966231777459875e-05,
      "loss": 0.7527,
      "step": 474
    },
    {
      "epoch": 0.9,
      "learning_rate": 3.962187329869102e-05,
      "loss": 0.9925,
      "step": 475
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.958137057315457e-05,
      "loss": 0.8299,
      "step": 476
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.9540809759341426e-05,
      "loss": 0.8115,
      "step": 477
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.9500191018835e-05,
      "loss": 0.7775,
      "step": 478
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.945951451344948e-05,
      "loss": 0.7463,
      "step": 479
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.941878040522918e-05,
      "loss": 0.912,
      "step": 480
    },
    {
      "epoch": 0.91,
      "learning_rate": 3.937798885644789e-05,
      "loss": 0.8419,
      "step": 481
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.93371400296082e-05,
      "loss": 0.9328,
      "step": 482
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.929623408744092e-05,
      "loss": 0.7434,
      "step": 483
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.925527119290436e-05,
      "loss": 0.7526,
      "step": 484
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.9214251509183733e-05,
      "loss": 0.7491,
      "step": 485
    },
    {
      "epoch": 0.92,
      "learning_rate": 3.917317519969046e-05,
      "loss": 0.7936,
      "step": 486
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.913204242806158e-05,
      "loss": 0.8184,
      "step": 487
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.909085335815904e-05,
      "loss": 0.7352,
      "step": 488
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.904960815406906e-05,
      "loss": 0.7438,
      "step": 489
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.900830698010149e-05,
      "loss": 0.7998,
      "step": 490
    },
    {
      "epoch": 0.93,
      "learning_rate": 3.896695000078917e-05,
      "loss": 0.7039,
      "step": 491
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.892553738088721e-05,
      "loss": 0.9362,
      "step": 492
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.888406928537242e-05,
      "loss": 0.7281,
      "step": 493
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.884254587944258e-05,
      "loss": 0.7993,
      "step": 494
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.880096732851581e-05,
      "loss": 0.6587,
      "step": 495
    },
    {
      "epoch": 0.94,
      "learning_rate": 3.8759333798229955e-05,
      "loss": 0.8164,
      "step": 496
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.8717645454441836e-05,
      "loss": 0.7832,
      "step": 497
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.867590246322665e-05,
      "loss": 0.8283,
      "step": 498
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.863410499087731e-05,
      "loss": 0.9244,
      "step": 499
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.859225320390375e-05,
      "loss": 0.7304,
      "step": 500
    },
    {
      "epoch": 0.95,
      "eval_loss": 0.700370192527771,
      "eval_runtime": 2.8512,
      "eval_samples_per_second": 3.157,
      "eval_steps_per_second": 0.701,
      "step": 500
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.8550347269032285e-05,
      "loss": 0.8409,
      "step": 501
    },
    {
      "epoch": 0.95,
      "learning_rate": 3.850838735320494e-05,
      "loss": 0.7045,
      "step": 502
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.84663736235788e-05,
      "loss": 0.8243,
      "step": 503
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.842430624752529e-05,
      "loss": 0.7521,
      "step": 504
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.83821853926296e-05,
      "loss": 0.7212,
      "step": 505
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.834001122668992e-05,
      "loss": 0.8555,
      "step": 506
    },
    {
      "epoch": 0.96,
      "learning_rate": 3.829778391771684e-05,
      "loss": 0.748,
      "step": 507
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.825550363393268e-05,
      "loss": 0.7956,
      "step": 508
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.8213170543770724e-05,
      "loss": 0.7905,
      "step": 509
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.8170784815874705e-05,
      "loss": 0.7764,
      "step": 510
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.812834661909801e-05,
      "loss": 0.8791,
      "step": 511
    },
    {
      "epoch": 0.97,
      "learning_rate": 3.808585612250305e-05,
      "loss": 0.8355,
      "step": 512
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.8043313495360586e-05,
      "loss": 0.7779,
      "step": 513
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.800071890714906e-05,
      "loss": 0.9408,
      "step": 514
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.79580725275539e-05,
      "loss": 0.92,
      "step": 515
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.791537452646687e-05,
      "loss": 0.857,
      "step": 516
    },
    {
      "epoch": 0.98,
      "learning_rate": 3.787262507398538e-05,
      "loss": 0.8673,
      "step": 517
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.7829824340411795e-05,
      "loss": 0.8399,
      "step": 518
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.778697249625279e-05,
      "loss": 0.7836,
      "step": 519
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.774406971221863e-05,
      "loss": 1.0116,
      "step": 520
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.770111615922253e-05,
      "loss": 0.8791,
      "step": 521
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.765811200837994e-05,
      "loss": 0.7788,
      "step": 522
    },
    {
      "epoch": 0.99,
      "learning_rate": 3.7615057431007885e-05,
      "loss": 0.8853,
      "step": 523
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.757195259862427e-05,
      "loss": 0.7102,
      "step": 524
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.7528797682947206e-05,
      "loss": 0.7532,
      "step": 525
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.7485592855894346e-05,
      "loss": 0.7091,
      "step": 526
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.744233828958213e-05,
      "loss": 0.8155,
      "step": 527
    },
    {
      "epoch": 1.0,
      "learning_rate": 3.739903415632518e-05,
      "loss": 0.8474,
      "step": 528
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.735568062863557e-05,
      "loss": 0.8274,
      "step": 529
    },
    {
      "epoch": 1.01,
      "learning_rate": 3.731227787922214e-05,
      "loss": 0.8146,
      "step": 530
    },
    {
|
"epoch": 1.01, |
|
"learning_rate": 3.726882608098984e-05, |
|
"loss": 0.7811, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.722532540703898e-05, |
|
"loss": 0.669, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.718177603066462e-05, |
|
"loss": 0.6793, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.71381781253558e-05, |
|
"loss": 0.7047, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.709453186479491e-05, |
|
"loss": 0.6797, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.705083742285697e-05, |
|
"loss": 0.7729, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.700709497360894e-05, |
|
"loss": 0.8578, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 3.6963304691309026e-05, |
|
"loss": 0.7326, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.691946675040598e-05, |
|
"loss": 0.7918, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.6875581325538436e-05, |
|
"loss": 0.7694, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.6831648591534166e-05, |
|
"loss": 0.8746, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.678766872340943e-05, |
|
"loss": 0.8269, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.674364189636824e-05, |
|
"loss": 0.8922, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 3.66995682858017e-05, |
|
"loss": 0.7966, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.665544806728726e-05, |
|
"loss": 0.7073, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.661128141658807e-05, |
|
"loss": 0.8515, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.656706850965225e-05, |
|
"loss": 0.8462, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.652280952261216e-05, |
|
"loss": 0.7801, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 3.647850463178378e-05, |
|
"loss": 0.8474, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.643415401366591e-05, |
|
"loss": 0.7059, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.638975784493957e-05, |
|
"loss": 0.7343, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.634531630246718e-05, |
|
"loss": 0.7755, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.630082956329197e-05, |
|
"loss": 0.7483, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.05, |
|
"learning_rate": 3.6256297804637185e-05, |
|
"loss": 0.7652, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.621172120390544e-05, |
|
"loss": 0.7683, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.616709993867796e-05, |
|
"loss": 0.7455, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.612243418671395e-05, |
|
"loss": 0.6761, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.607772412594978e-05, |
|
"loss": 0.8251, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 3.6032969934498376e-05, |
|
"loss": 0.7908, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.5988171790648454e-05, |
|
"loss": 0.7453, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.594332987286383e-05, |
|
"loss": 0.812, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.589844435978269e-05, |
|
"loss": 0.7476, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.585351543021691e-05, |
|
"loss": 0.8996, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.58085432631513e-05, |
|
"loss": 0.6811, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.07, |
|
"learning_rate": 3.576352803774296e-05, |
|
"loss": 0.7718, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.571846993332048e-05, |
|
"loss": 0.8054, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.567336912938329e-05, |
|
"loss": 0.7993, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.5628225805600914e-05, |
|
"loss": 0.7635, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.558304014181228e-05, |
|
"loss": 0.8685, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 3.553781231802497e-05, |
|
"loss": 0.7903, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.549254251441451e-05, |
|
"loss": 0.7809, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.5447230911323694e-05, |
|
"loss": 0.6816, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.540187768926182e-05, |
|
"loss": 0.7613, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.5356483028903976e-05, |
|
"loss": 0.6891, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.09, |
|
"learning_rate": 3.5311047111090346e-05, |
|
"loss": 0.8444, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.5265570116825455e-05, |
|
"loss": 0.7576, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.5220052227277467e-05, |
|
"loss": 0.7738, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.517449362377748e-05, |
|
"loss": 0.8101, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.512889448781876e-05, |
|
"loss": 0.7547, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.1, |
|
"learning_rate": 3.5083255001056074e-05, |
|
"loss": 0.8112, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.503757534530491e-05, |
|
"loss": 0.7774, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.499185570254078e-05, |
|
"loss": 0.7974, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.4946096254898516e-05, |
|
"loss": 0.7291, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.490029718467149e-05, |
|
"loss": 0.833, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.485445867431095e-05, |
|
"loss": 0.7575, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 3.4808580906425254e-05, |
|
"loss": 0.8991, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.4762664063779135e-05, |
|
"loss": 0.7575, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.4716708329293e-05, |
|
"loss": 0.7832, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.46707138860422e-05, |
|
"loss": 0.7383, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.462468091725628e-05, |
|
"loss": 0.879, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.12, |
|
"learning_rate": 3.4578609606318265e-05, |
|
"loss": 0.8216, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.453250013676392e-05, |
|
"loss": 0.6724, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.4486352692281035e-05, |
|
"loss": 0.8348, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.4440167456708665e-05, |
|
"loss": 0.9279, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.439394461403644e-05, |
|
"loss": 0.7776, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 3.4347684348403766e-05, |
|
"loss": 0.6902, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.430138684409918e-05, |
|
"loss": 0.7233, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.425505228555954e-05, |
|
"loss": 0.8372, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.42086808573693e-05, |
|
"loss": 0.7619, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.416227274425985e-05, |
|
"loss": 0.7765, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"eval_loss": 0.6920838952064514, |
|
"eval_runtime": 2.7184, |
|
"eval_samples_per_second": 3.311, |
|
"eval_steps_per_second": 0.736, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.14, |
|
"learning_rate": 3.4115828131108665e-05, |
|
"loss": 0.832, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.406934720293865e-05, |
|
"loss": 0.8499, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.402283014491741e-05, |
|
"loss": 0.7741, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.39762771423564e-05, |
|
"loss": 0.7704, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.392968838071036e-05, |
|
"loss": 0.714, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.388306404557642e-05, |
|
"loss": 0.859, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 3.383640432269346e-05, |
|
"loss": 0.6714, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.378970939794133e-05, |
|
"loss": 0.7763, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.37429794573401e-05, |
|
"loss": 0.7375, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.3696214687049345e-05, |
|
"loss": 0.7542, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.364941527336739e-05, |
|
"loss": 0.7633, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 3.3602581402730574e-05, |
|
"loss": 0.8715, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.355571326171249e-05, |
|
"loss": 0.925, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.350881103702327e-05, |
|
"loss": 0.8448, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.346187491550883e-05, |
|
"loss": 0.7896, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.34149050841501e-05, |
|
"loss": 0.8109, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 3.336790173006232e-05, |
|
"loss": 0.7915, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.3320865040494266e-05, |
|
"loss": 0.7159, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.3273795202827514e-05, |
|
"loss": 0.6816, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.32266924045757e-05, |
|
"loss": 0.7911, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.317955683338376e-05, |
|
"loss": 0.6496, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.3132388677027196e-05, |
|
"loss": 0.6928, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 3.308518812341131e-05, |
|
"loss": 0.741, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.303795536057047e-05, |
|
"loss": 0.8024, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.299069057666736e-05, |
|
"loss": 0.7628, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.294339395999224e-05, |
|
"loss": 0.7485, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.2896065698962145e-05, |
|
"loss": 0.7243, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.19, |
|
"learning_rate": 3.2848705982120204e-05, |
|
"loss": 0.9272, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.2801314998134855e-05, |
|
"loss": 0.6828, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.275389293579908e-05, |
|
"loss": 0.8215, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.27064399840297e-05, |
|
"loss": 0.7792, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.265895633186657e-05, |
|
"loss": 0.8034, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 3.2611442168471817e-05, |
|
"loss": 0.8222, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.256389768312917e-05, |
|
"loss": 0.7199, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.2516323065243135e-05, |
|
"loss": 0.7743, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.246871850433824e-05, |
|
"loss": 0.7197, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.242108419005832e-05, |
|
"loss": 0.8729, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 3.237342031216574e-05, |
|
"loss": 0.8425, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.23257270605406e-05, |
|
"loss": 0.7835, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.227800462518007e-05, |
|
"loss": 0.7397, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.223025319619755e-05, |
|
"loss": 0.8645, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.2182472963821954e-05, |
|
"loss": 0.8268, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.213466411839693e-05, |
|
"loss": 0.8185, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 3.208682685038011e-05, |
|
"loss": 0.8279, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.203896135034238e-05, |
|
"loss": 0.75, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.199106780896707e-05, |
|
"loss": 0.781, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.1943146417049236e-05, |
|
"loss": 0.7298, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.189519736549486e-05, |
|
"loss": 0.802, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 3.184722084532013e-05, |
|
"loss": 0.712, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1799217047650664e-05, |
|
"loss": 0.7124, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.175118616372074e-05, |
|
"loss": 0.7176, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1703128384872535e-05, |
|
"loss": 0.833, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.165504390255538e-05, |
|
"loss": 0.7863, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 3.1606932908324974e-05, |
|
"loss": 0.8052, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.155879559384265e-05, |
|
"loss": 0.7619, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.151063215087456e-05, |
|
"loss": 0.811, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.146244277129099e-05, |
|
"loss": 0.9439, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.141422764706552e-05, |
|
"loss": 0.7671, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 3.136598697027429e-05, |
|
"loss": 0.772, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.131772093309524e-05, |
|
"loss": 0.9286, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.1269429727807356e-05, |
|
"loss": 0.8236, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.122111354678985e-05, |
|
"loss": 0.7815, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.117277258252147e-05, |
|
"loss": 0.6822, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.112440702757967e-05, |
|
"loss": 0.7596, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 3.107601707463987e-05, |
|
"loss": 0.6976, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.102760291647469e-05, |
|
"loss": 0.7869, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.097916474595318e-05, |
|
"loss": 0.6983, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.093070275604004e-05, |
|
"loss": 0.6471, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.088221713979485e-05, |
|
"loss": 0.8774, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 3.083370809037135e-05, |
|
"loss": 0.8367, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.078517580101659e-05, |
|
"loss": 0.8485, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.0736620465070224e-05, |
|
"loss": 0.6802, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.068804227596371e-05, |
|
"loss": 0.7254, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.063944142721954e-05, |
|
"loss": 0.6942, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 3.0590818112450496e-05, |
|
"loss": 0.7767, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.0542172525358833e-05, |
|
"loss": 0.6698, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.049350485973557e-05, |
|
"loss": 0.822, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.044481530945964e-05, |
|
"loss": 0.7565, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.0396104068497172e-05, |
|
"loss": 0.7559, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 3.0347371330900727e-05, |
|
"loss": 0.6684, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.0298617290808472e-05, |
|
"loss": 0.767, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.024984214244345e-05, |
|
"loss": 0.8836, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.02010460801128e-05, |
|
"loss": 0.8231, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.0152229298206973e-05, |
|
"loss": 0.7628, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.0103391991198954e-05, |
|
"loss": 0.7916, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 3.0054534353643494e-05, |
|
"loss": 0.8659, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 3.0005656580176355e-05, |
|
"loss": 0.8002, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.995675886551349e-05, |
|
"loss": 0.7897, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.9907841404450315e-05, |
|
"loss": 0.658, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.985890439186088e-05, |
|
"loss": 0.8164, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 2.9809948022697158e-05, |
|
"loss": 0.6619, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.9760972491988213e-05, |
|
"loss": 0.7705, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.9711977994839448e-05, |
|
"loss": 0.8147, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.966296472643182e-05, |
|
"loss": 0.6752, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.961393288202107e-05, |
|
"loss": 0.7082, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 2.9564882656936947e-05, |
|
"loss": 0.8384, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.9515814246582402e-05, |
|
"loss": 0.8222, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.9466727846432856e-05, |
|
"loss": 0.7317, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.9417623652035386e-05, |
|
"loss": 0.8472, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.9368501859007948e-05, |
|
"loss": 0.8283, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"eval_loss": 0.688589870929718, |
|
"eval_runtime": 2.8028, |
|
"eval_samples_per_second": 3.211, |
|
"eval_steps_per_second": 0.714, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 2.931936266303863e-05, |
|
"loss": 0.7819, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.9270206259884815e-05, |
|
"loss": 0.6657, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.922103284537247e-05, |
|
"loss": 0.7602, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.91718426153953e-05, |
|
"loss": 0.6639, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.912263576591402e-05, |
|
"loss": 0.7266, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.907341249295554e-05, |
|
"loss": 0.6518, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 2.9024172992612214e-05, |
|
"loss": 0.8892, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.897491746104101e-05, |
|
"loss": 0.8929, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.8925646094462794e-05, |
|
"loss": 0.9271, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.8876359089161498e-05, |
|
"loss": 0.7644, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.8827056641483347e-05, |
|
"loss": 0.87, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 2.8777738947836103e-05, |
|
"loss": 0.7929, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.8728406204688253e-05, |
|
"loss": 0.7706, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.8679058608568232e-05, |
|
"loss": 0.6968, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.8629696356063663e-05, |
|
"loss": 0.6761, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.858031964382053e-05, |
|
"loss": 0.7972, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 2.853092866854245e-05, |
|
"loss": 0.7544, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.8481523626989842e-05, |
|
"loss": 0.7911, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.8432104715979152e-05, |
|
"loss": 0.821, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.838267213238211e-05, |
|
"loss": 0.7393, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.8333226073124893e-05, |
|
"loss": 0.7582, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 2.828376673518735e-05, |
|
"loss": 0.7258, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.8234294315602245e-05, |
|
"loss": 0.8525, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.818480901145447e-05, |
|
"loss": 0.8147, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.813531101988021e-05, |
|
"loss": 0.7536, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.808580053806622e-05, |
|
"loss": 0.8148, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.8036277763249007e-05, |
|
"loss": 0.7732, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 2.798674289271404e-05, |
|
"loss": 0.7813, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.793719612379499e-05, |
|
"loss": 0.7391, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.7887637653872916e-05, |
|
"loss": 0.9156, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.7838067680375495e-05, |
|
"loss": 0.7615, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.7788486400776233e-05, |
|
"loss": 0.8723, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 2.7738894012593662e-05, |
|
"loss": 0.7906, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.7689290713390598e-05, |
|
"loss": 0.817, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.76396767007733e-05, |
|
"loss": 0.8896, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.7590052172390708e-05, |
|
"loss": 0.7621, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.7540417325933655e-05, |
|
"loss": 0.8028, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 2.7490772359134093e-05, |
|
"loss": 0.7157, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.744111746976426e-05, |
|
"loss": 0.8162, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.7391452855635958e-05, |
|
"loss": 0.7835, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.7341778714599708e-05, |
|
"loss": 0.7614, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.729209524454399e-05, |
|
"loss": 0.8477, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 2.724240264339445e-05, |
|
"loss": 0.7091, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.7192701109113105e-05, |
|
"loss": 0.7915, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.7142990839697564e-05, |
|
"loss": 0.7741, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.7093272033180222e-05, |
|
"loss": 0.6316, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.704354488762751e-05, |
|
"loss": 0.7003, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6993809601139053e-05, |
|
"loss": 0.7132, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 2.6944066371846917e-05, |
|
"loss": 0.7494, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.689431539791482e-05, |
|
"loss": 0.7249, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6844556877537304e-05, |
|
"loss": 0.708, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.679479100893901e-05, |
|
"loss": 0.8788, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6745017990373835e-05, |
|
"loss": 0.7451, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 2.6695238020124146e-05, |
|
"loss": 0.7337, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.664545129650004e-05, |
|
"loss": 0.8061, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6595658017838472e-05, |
|
"loss": 0.7314, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6545858382502547e-05, |
|
"loss": 0.7077, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.6496052588880677e-05, |
|
"loss": 0.8084, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 2.644624083538581e-05, |
|
"loss": 0.8214, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.6396423320454627e-05, |
|
"loss": 0.7688, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.6346600242546793e-05, |
|
"loss": 0.8176, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.6296771800144093e-05, |
|
"loss": 0.8393, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.6246938191749704e-05, |
|
"loss": 0.9029, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 2.6197099615887394e-05, |
|
"loss": 0.719, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.614725627110069e-05, |
|
"loss": 0.8233, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.6097408355952142e-05, |
|
"loss": 0.7085, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.6047556069022493e-05, |
|
"loss": 0.711, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5997699608909913e-05, |
|
"loss": 0.7867, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.5947839174229198e-05, |
|
"loss": 0.8321, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 2.589797496361096e-05, |
|
"loss": 0.7958, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.584810717570087e-05, |
|
"loss": 0.6435, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5798236009158837e-05, |
|
"loss": 0.8112, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.574836166265826e-05, |
|
"loss": 0.7119, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5698484334885153e-05, |
|
"loss": 0.7362, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 2.5648604224537458e-05, |
|
"loss": 0.8092, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.559872153032417e-05, |
|
"loss": 0.6548, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.554883645096459e-05, |
|
"loss": 0.7224, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5498949185187533e-05, |
|
"loss": 0.6886, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5449059931730483e-05, |
|
"loss": 0.7935, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 2.5399168889338897e-05, |
|
"loss": 0.8734, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.534927625676532e-05, |
|
"loss": 0.6876, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5299382232768633e-05, |
|
"loss": 0.7049, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5249487016113288e-05, |
|
"loss": 0.7669, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.5199590805568463e-05, |
|
"loss": 0.6549, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 2.51496937999073e-05, |
|
"loss": 0.8337, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5099796197906106e-05, |
|
"loss": 0.7316, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5049898198343586e-05, |
|
"loss": 0.7525, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.5e-05, |
|
"loss": 0.8389, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4950101801656416e-05, |
|
"loss": 0.7193, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.490020380209389e-05, |
|
"loss": 0.7287, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 2.4850306200092714e-05, |
|
"loss": 0.6943, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.480040919443155e-05, |
|
"loss": 0.767, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4750512983886718e-05, |
|
"loss": 0.8149, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.470061776723137e-05, |
|
"loss": 0.7885, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.4650723743234684e-05, |
|
"loss": 0.6995, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 2.460083111066111e-05, |
|
"loss": 0.8069, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4550940068269522e-05, |
|
"loss": 0.7725, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.450105081481248e-05, |
|
"loss": 0.8232, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4451163549035412e-05, |
|
"loss": 0.7017, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4401278469675835e-05, |
|
"loss": 0.8166, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"eval_loss": 0.6844746470451355, |
|
"eval_runtime": 2.748, |
|
"eval_samples_per_second": 3.275, |
|
"eval_steps_per_second": 0.728, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 2.4351395775462548e-05, |
|
"loss": 0.84, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4301515665114846e-05, |
|
"loss": 0.7262, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.425163833734175e-05, |
|
"loss": 0.7708, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4201763990841166e-05, |
|
"loss": 0.7251, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.415189282429913e-05, |
|
"loss": 0.7109, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4102025036389046e-05, |
|
"loss": 0.6338, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 2.4052160825770805e-05, |
|
"loss": 0.7663, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.4002300391090086e-05, |
|
"loss": 0.8332, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.3952443930977513e-05, |
|
"loss": 0.7448, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.3902591644047867e-05, |
|
"loss": 0.7438, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.385274372889932e-05, |
|
"loss": 0.8371, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 2.380290038411261e-05, |
|
"loss": 0.8334, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3753061808250295e-05, |
|
"loss": 0.7709, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3703228199855916e-05, |
|
"loss": 0.7632, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3653399757453216e-05, |
|
"loss": 0.831, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3603576679545375e-05, |
|
"loss": 0.748, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 2.3553759164614198e-05, |
|
"loss": 0.8873, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.350394741111933e-05, |
|
"loss": 0.7561, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.3454141617497456e-05, |
|
"loss": 0.7786, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.340434198216154e-05, |
|
"loss": 0.7073, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.335454870349997e-05, |
|
"loss": 0.8041, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 2.330476197987586e-05, |
|
"loss": 0.6945, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3254982009626168e-05, |
|
"loss": 0.73, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.320520899106099e-05, |
|
"loss": 0.7354, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3155443122462695e-05, |
|
"loss": 0.5653, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3105684602085192e-05, |
|
"loss": 0.7855, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.305593362815309e-05, |
|
"loss": 0.8154, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 2.3006190398860953e-05, |
|
"loss": 0.8139, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2956455112372496e-05, |
|
"loss": 0.7663, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2906727966819777e-05, |
|
"loss": 0.684, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.285700916030244e-05, |
|
"loss": 0.8083, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2807298890886905e-05, |
|
"loss": 0.733, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 2.2757597356605552e-05, |
|
"loss": 0.8283, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.2707904755456015e-05, |
|
"loss": 0.7032, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.2658221285400294e-05, |
|
"loss": 0.6825, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.2608547144364048e-05, |
|
"loss": 0.9023, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.2558882530235753e-05, |
|
"loss": 0.6826, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 2.2509227640865923e-05, |
|
"loss": 0.7528, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.2459582674066355e-05, |
|
"loss": 0.747, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.2409947827609298e-05, |
|
"loss": 0.8042, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.2360323299226704e-05, |
|
"loss": 0.6112, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.2310709286609398e-05, |
|
"loss": 0.7995, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 2.226110598740634e-05, |
|
"loss": 0.7816, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.221151359922378e-05, |
|
"loss": 0.7245, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.216193231962451e-05, |
|
"loss": 0.8434, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.2112362346127093e-05, |
|
"loss": 0.6944, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.2062803876205012e-05, |
|
"loss": 0.6996, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.2013257107285963e-05, |
|
"loss": 0.6934, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 2.1963722236751002e-05, |
|
"loss": 0.8258, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.1914199461933783e-05, |
|
"loss": 0.675, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.1864688980119795e-05, |
|
"loss": 0.7469, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.1815190988545532e-05, |
|
"loss": 0.8251, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.1765705684397754e-05, |
|
"loss": 0.7384, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 2.171623326481265e-05, |
|
"loss": 0.7057, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.1666773926875123e-05, |
|
"loss": 0.747, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.1617327867617892e-05, |
|
"loss": 0.7404, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.156789528402085e-05, |
|
"loss": 0.7471, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.1518476373010167e-05, |
|
"loss": 0.6522, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 2.1469071331457556e-05, |
|
"loss": 0.7264, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.1419680356179478e-05, |
|
"loss": 0.8483, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.1370303643936347e-05, |
|
"loss": 0.7971, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.1320941391431777e-05, |
|
"loss": 0.7694, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.127159379531175e-05, |
|
"loss": 0.7246, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 2.12222610521639e-05, |
|
"loss": 0.7312, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.1172943358516652e-05, |
|
"loss": 0.5753, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.112364091083851e-05, |
|
"loss": 0.7694, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.1074353905537216e-05, |
|
"loss": 0.942, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.1025082538958994e-05, |
|
"loss": 0.6571, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.0975827007387795e-05, |
|
"loss": 0.8069, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 2.0926587507044458e-05, |
|
"loss": 0.7079, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.0877364234085984e-05, |
|
"loss": 0.7603, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.082815738460471e-05, |
|
"loss": 0.7821, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.0778967154627537e-05, |
|
"loss": 0.7098, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.0729793740115187e-05, |
|
"loss": 0.7475, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 2.0680637336961373e-05, |
|
"loss": 0.7824, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.063149814099205e-05, |
|
"loss": 0.7515, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.0582376347964613e-05, |
|
"loss": 0.6972, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.053327215356715e-05, |
|
"loss": 0.7595, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.0484185753417604e-05, |
|
"loss": 0.6972, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 2.043511734306306e-05, |
|
"loss": 0.7549, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.0386067117978934e-05, |
|
"loss": 0.8029, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.033703527356818e-05, |
|
"loss": 0.7691, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.028802200516056e-05, |
|
"loss": 0.6449, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.0239027508011796e-05, |
|
"loss": 0.7241, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 2.0190051977302845e-05, |
|
"loss": 0.7877, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.0141095608139125e-05, |
|
"loss": 0.8097, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.009215859554969e-05, |
|
"loss": 0.7039, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 2.0043241134486508e-05, |
|
"loss": 0.843, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.999434341982365e-05, |
|
"loss": 0.7726, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.9945465646356512e-05, |
|
"loss": 0.8044, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 1.9896608008801056e-05, |
|
"loss": 0.6961, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.9847770701793032e-05, |
|
"loss": 0.7592, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.97989539198872e-05, |
|
"loss": 0.8141, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.975015785755655e-05, |
|
"loss": 0.708, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.970138270919154e-05, |
|
"loss": 0.6981, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 1.965262866909928e-05, |
|
"loss": 0.761, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.960389593150283e-05, |
|
"loss": 0.7069, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.9555184690540366e-05, |
|
"loss": 0.9216, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.9506495140264434e-05, |
|
"loss": 0.741, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 1.945782747464116e-05, |
|
"loss": 0.7148, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"eval_loss": 0.675603449344635, |
|
"eval_runtime": 2.7591, |
|
"eval_samples_per_second": 3.262, |
|
"eval_steps_per_second": 0.725, |
|
"step": 900 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1575, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"total_flos": 1.2654573152517489e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|