|
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.8618832148243913,
"eval_steps": 100,
"global_step": 2000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 5e-05, |
|
"loss": 1.5475, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999997705921923e-05, |
|
"loss": 1.4738, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9999908236919016e-05, |
|
"loss": 1.709, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999979353322567e-05, |
|
"loss": 1.3682, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.99996329483497e-05, |
|
"loss": 1.4525, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999942648258581e-05, |
|
"loss": 1.2902, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999917413631294e-05, |
|
"loss": 1.2581, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9998875909994206e-05, |
|
"loss": 1.1363, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9998531804176926e-05, |
|
"loss": 1.0479, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999814181949263e-05, |
|
"loss": 1.0362, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.999770595665704e-05, |
|
"loss": 0.9415, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999722421647006e-05, |
|
"loss": 1.0849, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999669659981584e-05, |
|
"loss": 0.8538, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999612310766268e-05, |
|
"loss": 0.9404, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9995503741063096e-05, |
|
"loss": 0.9645, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999483850115379e-05, |
|
"loss": 1.0898, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9994127389155623e-05, |
|
"loss": 0.9545, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9993370406373705e-05, |
|
"loss": 0.8432, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999256755419729e-05, |
|
"loss": 0.9946, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999171883409982e-05, |
|
"loss": 0.7908, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.999082424763891e-05, |
|
"loss": 0.8971, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998988379645638e-05, |
|
"loss": 0.85, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998889748227819e-05, |
|
"loss": 0.8626, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9987865306914495e-05, |
|
"loss": 0.8335, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9986787272259596e-05, |
|
"loss": 0.9314, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998566338029198e-05, |
|
"loss": 0.8645, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998449363307428e-05, |
|
"loss": 0.9316, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998327803275328e-05, |
|
"loss": 0.805, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998201658155994e-05, |
|
"loss": 1.0418, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.998070928180935e-05, |
|
"loss": 0.649, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.997935613590075e-05, |
|
"loss": 0.8, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.997795714631751e-05, |
|
"loss": 0.8014, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.997651231562715e-05, |
|
"loss": 0.7854, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9975021646481315e-05, |
|
"loss": 0.8782, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.997348514161576e-05, |
|
"loss": 0.7564, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.997190280385039e-05, |
|
"loss": 0.9438, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.997027463608921e-05, |
|
"loss": 0.9248, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.996860064132033e-05, |
|
"loss": 0.76, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9966880822615965e-05, |
|
"loss": 0.8834, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.996511518313244e-05, |
|
"loss": 0.8928, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.996330372611017e-05, |
|
"loss": 0.8857, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.996144645487364e-05, |
|
"loss": 0.7975, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.995954337283145e-05, |
|
"loss": 0.7815, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9957594483476236e-05, |
|
"loss": 0.8869, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.995559979038474e-05, |
|
"loss": 0.9173, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.995355929721773e-05, |
|
"loss": 0.9258, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.995147300772006e-05, |
|
"loss": 0.896, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994934092572061e-05, |
|
"loss": 0.893, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994716305513232e-05, |
|
"loss": 0.8257, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994493939995214e-05, |
|
"loss": 0.8759, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994266996426107e-05, |
|
"loss": 0.9216, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.994035475222412e-05, |
|
"loss": 0.9097, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9937993768090306e-05, |
|
"loss": 1.015, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.993558701619266e-05, |
|
"loss": 0.8633, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9933134500948205e-05, |
|
"loss": 0.928, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.993063622685794e-05, |
|
"loss": 0.8069, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9928092198506866e-05, |
|
"loss": 0.885, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.992550242056393e-05, |
|
"loss": 0.7342, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.992286689778207e-05, |
|
"loss": 0.8593, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.992018563499814e-05, |
|
"loss": 0.9458, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.991745863713299e-05, |
|
"loss": 0.9511, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.991468590919135e-05, |
|
"loss": 0.8605, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9911867456261916e-05, |
|
"loss": 0.76, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9909003283517283e-05, |
|
"loss": 0.8037, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.990609339621397e-05, |
|
"loss": 0.8945, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.990313779969237e-05, |
|
"loss": 0.7774, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.990013649937679e-05, |
|
"loss": 0.8737, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.989708950077541e-05, |
|
"loss": 0.8914, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9893996809480246e-05, |
|
"loss": 0.8682, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9890858431167226e-05, |
|
"loss": 0.7745, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.988767437159608e-05, |
|
"loss": 1.0306, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.988444463661041e-05, |
|
"loss": 0.904, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.98811692321376e-05, |
|
"loss": 0.8478, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9877848164188904e-05, |
|
"loss": 0.7117, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.987448143885933e-05, |
|
"loss": 0.6789, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.987106906232773e-05, |
|
"loss": 0.9385, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9867611040856695e-05, |
|
"loss": 1.049, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9864107380792594e-05, |
|
"loss": 0.8914, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.986055808856558e-05, |
|
"loss": 0.8211, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9856963170689525e-05, |
|
"loss": 0.9007, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.9853322633762045e-05, |
|
"loss": 0.832, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.984963648446449e-05, |
|
"loss": 0.807, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.984590472956191e-05, |
|
"loss": 0.7031, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.984212737590304e-05, |
|
"loss": 0.9261, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.983830443042033e-05, |
|
"loss": 0.7759, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.983443590012989e-05, |
|
"loss": 0.7147, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9830521792131484e-05, |
|
"loss": 0.8554, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.982656211360852e-05, |
|
"loss": 0.8052, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.982255687182806e-05, |
|
"loss": 0.8408, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.981850607414077e-05, |
|
"loss": 0.7086, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.981440972798092e-05, |
|
"loss": 0.7808, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.981026784086639e-05, |
|
"loss": 0.8632, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.980608042039862e-05, |
|
"loss": 0.8798, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.980184747426263e-05, |
|
"loss": 0.9253, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9797569010226995e-05, |
|
"loss": 0.8305, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.979324503614381e-05, |
|
"loss": 0.9474, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.97888755599487e-05, |
|
"loss": 0.8102, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.978446058966081e-05, |
|
"loss": 0.6445, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.978000013338277e-05, |
|
"loss": 0.8123, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.977549419930067e-05, |
|
"loss": 0.9368, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"eval_loss": 0.8984958529472351, |
|
"eval_runtime": 10.1685, |
|
"eval_samples_per_second": 3.639, |
|
"eval_steps_per_second": 0.492, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.97709427956841e-05, |
|
"loss": 0.7673, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.976634593088608e-05, |
|
"loss": 0.8571, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.9761703613343045e-05, |
|
"loss": 0.9388, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.975701585157489e-05, |
|
"loss": 0.7665, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9752282654184876e-05, |
|
"loss": 0.9379, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.974750402985966e-05, |
|
"loss": 0.9475, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.974267998736928e-05, |
|
"loss": 0.9757, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.973781053556711e-05, |
|
"loss": 0.6865, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.973289568338989e-05, |
|
"loss": 0.9761, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9727935439857634e-05, |
|
"loss": 0.9132, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.972292981407372e-05, |
|
"loss": 0.774, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.971787881522477e-05, |
|
"loss": 1.0217, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9712782452580695e-05, |
|
"loss": 0.9341, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.970764073549467e-05, |
|
"loss": 0.7135, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.970245367340307e-05, |
|
"loss": 0.7177, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.969722127582553e-05, |
|
"loss": 0.9312, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9691943552364885e-05, |
|
"loss": 0.8677, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.968662051270711e-05, |
|
"loss": 0.7371, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.968125216662142e-05, |
|
"loss": 0.7248, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9675838523960114e-05, |
|
"loss": 0.7281, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.967037959465865e-05, |
|
"loss": 0.8678, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.96648753887356e-05, |
|
"loss": 0.8477, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.965932591629262e-05, |
|
"loss": 0.7801, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.965373118751446e-05, |
|
"loss": 0.8289, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.9648091212668904e-05, |
|
"loss": 0.7771, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.964240600210679e-05, |
|
"loss": 0.7933, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.963667556626198e-05, |
|
"loss": 0.8119, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.963089991565131e-05, |
|
"loss": 0.9761, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9625079060874634e-05, |
|
"loss": 0.7456, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.961921301261474e-05, |
|
"loss": 0.7955, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.961330178163736e-05, |
|
"loss": 0.8379, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.960734537879117e-05, |
|
"loss": 0.793, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.960134381500771e-05, |
|
"loss": 0.8465, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.959529710130144e-05, |
|
"loss": 0.8813, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.958920524876967e-05, |
|
"loss": 0.8148, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9583068268592544e-05, |
|
"loss": 0.8524, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.957688617203302e-05, |
|
"loss": 0.6524, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.957065897043689e-05, |
|
"loss": 0.8021, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9564386675232674e-05, |
|
"loss": 0.9442, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.955806929793171e-05, |
|
"loss": 0.8701, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9551706850128024e-05, |
|
"loss": 0.9975, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.954529934349839e-05, |
|
"loss": 0.7725, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.953884678980225e-05, |
|
"loss": 0.7544, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9532349200881745e-05, |
|
"loss": 0.6881, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9525806588661656e-05, |
|
"loss": 0.8211, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.951921896514938e-05, |
|
"loss": 0.7736, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.9512586342434956e-05, |
|
"loss": 0.9176, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.950590873269098e-05, |
|
"loss": 0.7639, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.94991861481726e-05, |
|
"loss": 0.8447, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.949241860121755e-05, |
|
"loss": 0.7537, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.948560610424604e-05, |
|
"loss": 0.7948, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.94787486697608e-05, |
|
"loss": 0.7734, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9471846310347006e-05, |
|
"loss": 0.8796, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.946489903867231e-05, |
|
"loss": 0.7196, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.945790686748678e-05, |
|
"loss": 0.7676, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.945086980962288e-05, |
|
"loss": 0.7697, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9443787877995464e-05, |
|
"loss": 0.8223, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.943666108560173e-05, |
|
"loss": 0.8416, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.942948944552122e-05, |
|
"loss": 0.8536, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.942227297091577e-05, |
|
"loss": 1.0185, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.94150116750295e-05, |
|
"loss": 0.8932, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.94077055711888e-05, |
|
"loss": 0.8254, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.940035467280229e-05, |
|
"loss": 0.7546, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9392958993360794e-05, |
|
"loss": 0.8746, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9385518546437325e-05, |
|
"loss": 0.9356, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.937803334568706e-05, |
|
"loss": 0.8459, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9370503404847304e-05, |
|
"loss": 0.7772, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9362928737737465e-05, |
|
"loss": 0.9711, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.935530935825907e-05, |
|
"loss": 0.849, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.934764528039566e-05, |
|
"loss": 0.759, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.933993651821282e-05, |
|
"loss": 0.7088, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.933218308585819e-05, |
|
"loss": 0.6487, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.9324384997561315e-05, |
|
"loss": 0.7925, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.931654226763375e-05, |
|
"loss": 0.922, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9308654910468964e-05, |
|
"loss": 0.8621, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.930072294054233e-05, |
|
"loss": 0.8429, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.929274637241108e-05, |
|
"loss": 0.7388, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9284725220714324e-05, |
|
"loss": 0.8557, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.927665950017297e-05, |
|
"loss": 0.8447, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.926854922558975e-05, |
|
"loss": 0.8104, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9260394411849134e-05, |
|
"loss": 0.9483, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9252195073917335e-05, |
|
"loss": 0.7172, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9243951226842305e-05, |
|
"loss": 0.7127, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9235662885753666e-05, |
|
"loss": 0.7191, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.92273300658627e-05, |
|
"loss": 0.7975, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.921895278246231e-05, |
|
"loss": 0.7981, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.921053105092701e-05, |
|
"loss": 0.7595, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9202064886712906e-05, |
|
"loss": 0.7376, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.919355430535761e-05, |
|
"loss": 0.8999, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9184999322480285e-05, |
|
"loss": 0.8945, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.917639995378156e-05, |
|
"loss": 0.7347, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.916775621504354e-05, |
|
"loss": 0.8662, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.915906812212976e-05, |
|
"loss": 0.6963, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9150335690985136e-05, |
|
"loss": 0.7317, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.9141558937635985e-05, |
|
"loss": 0.8358, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.913273787818995e-05, |
|
"loss": 0.7879, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.912387252883598e-05, |
|
"loss": 0.8523, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.911496290584433e-05, |
|
"loss": 0.8389, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.910600902556649e-05, |
|
"loss": 0.7412, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.909701090443518e-05, |
|
"loss": 0.8253, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"eval_loss": 0.8050532341003418, |
|
"eval_runtime": 10.2691, |
|
"eval_samples_per_second": 3.603, |
|
"eval_steps_per_second": 0.487, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.908796855896432e-05, |
|
"loss": 0.7399, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.907888200574898e-05, |
|
"loss": 0.9344, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.906975126146538e-05, |
|
"loss": 0.8562, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.906057634287082e-05, |
|
"loss": 0.7759, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.905135726680369e-05, |
|
"loss": 0.7058, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9042094050183415e-05, |
|
"loss": 0.9686, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.9032786710010424e-05, |
|
"loss": 0.9007, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.902343526336614e-05, |
|
"loss": 0.8106, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.901403972741291e-05, |
|
"loss": 0.8317, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.900460011939402e-05, |
|
"loss": 0.9889, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.899511645663361e-05, |
|
"loss": 0.7061, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.898558875653671e-05, |
|
"loss": 0.9084, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.897601703658914e-05, |
|
"loss": 0.8296, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.896640131435751e-05, |
|
"loss": 0.7512, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8956741607489215e-05, |
|
"loss": 0.7808, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8947037933712335e-05, |
|
"loss": 0.8168, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.893729031083567e-05, |
|
"loss": 0.6904, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8927498756748664e-05, |
|
"loss": 0.8154, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.891766328942138e-05, |
|
"loss": 0.732, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.8907783926904495e-05, |
|
"loss": 0.833, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.889786068732921e-05, |
|
"loss": 0.7308, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8887893588907304e-05, |
|
"loss": 0.8428, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8877882649930995e-05, |
|
"loss": 0.735, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8867827888773e-05, |
|
"loss": 0.6815, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.885772932388642e-05, |
|
"loss": 0.8329, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.884758697380481e-05, |
|
"loss": 0.7601, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8837400857142015e-05, |
|
"loss": 0.7283, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.882717099259224e-05, |
|
"loss": 0.7835, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8816897398929975e-05, |
|
"loss": 0.7538, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.880658009500995e-05, |
|
"loss": 0.6243, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8796219099767136e-05, |
|
"loss": 0.7844, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.878581443221668e-05, |
|
"loss": 0.8362, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.877536611145388e-05, |
|
"loss": 0.7069, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.876487415665413e-05, |
|
"loss": 0.7603, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.875433858707293e-05, |
|
"loss": 0.8936, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.874375942204583e-05, |
|
"loss": 0.7754, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8733136680988355e-05, |
|
"loss": 0.7515, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8722470383396024e-05, |
|
"loss": 0.7706, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.871176054884431e-05, |
|
"loss": 0.7938, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.870100719698854e-05, |
|
"loss": 0.7918, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.8690210347563975e-05, |
|
"loss": 0.9421, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.867937002038564e-05, |
|
"loss": 0.7249, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.866848623534839e-05, |
|
"loss": 0.7552, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.865755901242682e-05, |
|
"loss": 0.7415, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.864658837167526e-05, |
|
"loss": 0.7475, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.863557433322771e-05, |
|
"loss": 0.8791, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.862451691729783e-05, |
|
"loss": 0.8837, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.861341614417887e-05, |
|
"loss": 0.7218, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.860227203424367e-05, |
|
"loss": 0.8089, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8591084607944595e-05, |
|
"loss": 0.9958, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.85798538858135e-05, |
|
"loss": 0.861, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.856857988846172e-05, |
|
"loss": 0.783, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.855726263657999e-05, |
|
"loss": 0.8853, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8545902150938436e-05, |
|
"loss": 0.7947, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8534498452386543e-05, |
|
"loss": 0.7206, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8523051561853084e-05, |
|
"loss": 0.8859, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.851156150034611e-05, |
|
"loss": 0.745, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.850002828895289e-05, |
|
"loss": 0.9045, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8488451948839905e-05, |
|
"loss": 0.6362, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.847683250125277e-05, |
|
"loss": 0.8622, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8465169967516235e-05, |
|
"loss": 0.7037, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.845346436903408e-05, |
|
"loss": 0.9182, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.844171572728919e-05, |
|
"loss": 1.0684, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.842992406384338e-05, |
|
"loss": 0.7909, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.8418089400337444e-05, |
|
"loss": 0.8534, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.840621175849112e-05, |
|
"loss": 0.8092, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.839429116010297e-05, |
|
"loss": 0.7109, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.838232762705044e-05, |
|
"loss": 0.5593, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8370321181289754e-05, |
|
"loss": 0.6169, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.835827184485587e-05, |
|
"loss": 0.7919, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.834617963986251e-05, |
|
"loss": 0.8294, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.833404458850203e-05, |
|
"loss": 0.9067, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.832186671304543e-05, |
|
"loss": 0.8037, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8309646035842316e-05, |
|
"loss": 0.7649, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.829738257932084e-05, |
|
"loss": 0.736, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8285076365987646e-05, |
|
"loss": 0.6717, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.827272741842789e-05, |
|
"loss": 0.8744, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.826033575930512e-05, |
|
"loss": 0.8303, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.824790141136127e-05, |
|
"loss": 0.8506, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.823542439741666e-05, |
|
"loss": 0.7496, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.822290474036987e-05, |
|
"loss": 0.8215, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.821034246319774e-05, |
|
"loss": 0.8849, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.819773758895538e-05, |
|
"loss": 0.7652, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.818509014077602e-05, |
|
"loss": 0.7457, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.817240014187105e-05, |
|
"loss": 0.7701, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8159667615529954e-05, |
|
"loss": 0.7975, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.814689258512025e-05, |
|
"loss": 0.7421, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.813407507408748e-05, |
|
"loss": 0.771, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8121215105955145e-05, |
|
"loss": 0.7896, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.8108312704324654e-05, |
|
"loss": 0.6527, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.8095367892875295e-05, |
|
"loss": 0.7029, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.80823806953642e-05, |
|
"loss": 0.7148, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.80693511356263e-05, |
|
"loss": 0.7192, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.8056279237574234e-05, |
|
"loss": 0.8897, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.8043165025198375e-05, |
|
"loss": 0.9168, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.803000852256675e-05, |
|
"loss": 0.6315, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.801680975382499e-05, |
|
"loss": 0.8135, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.800356874319629e-05, |
|
"loss": 0.8978, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.79902855149814e-05, |
|
"loss": 0.8826, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.797696009355851e-05, |
|
"loss": 0.8169, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"eval_loss": 0.795865535736084, |
|
"eval_runtime": 10.328, |
|
"eval_samples_per_second": 3.582, |
|
"eval_steps_per_second": 0.484, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.796359250338328e-05, |
|
"loss": 0.7584, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.795018276898874e-05, |
|
"loss": 0.7721, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.793673091498527e-05, |
|
"loss": 0.7665, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7923236966060566e-05, |
|
"loss": 0.8482, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7909700946979555e-05, |
|
"loss": 0.6749, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7896122882584374e-05, |
|
"loss": 0.5408, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7882502797794346e-05, |
|
"loss": 0.8174, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7868840717605914e-05, |
|
"loss": 0.8283, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.785513666709256e-05, |
|
"loss": 0.7986, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.784139067140483e-05, |
|
"loss": 0.7339, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7827602755770224e-05, |
|
"loss": 0.6881, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.7813772945493185e-05, |
|
"loss": 0.7879, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.779990126595505e-05, |
|
"loss": 0.9105, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.778598774261399e-05, |
|
"loss": 0.6961, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.777203240100497e-05, |
|
"loss": 0.7347, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.775803526673971e-05, |
|
"loss": 0.8124, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7743996365506614e-05, |
|
"loss": 0.8755, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.772991572307076e-05, |
|
"loss": 0.6714, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.771579336527383e-05, |
|
"loss": 0.7593, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7701629318034034e-05, |
|
"loss": 0.7754, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.768742360734614e-05, |
|
"loss": 0.8489, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7673176259281336e-05, |
|
"loss": 0.7423, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7658887299987265e-05, |
|
"loss": 0.9569, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7644556755687905e-05, |
|
"loss": 0.6688, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.763018465268356e-05, |
|
"loss": 0.688, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7615771017350826e-05, |
|
"loss": 0.8766, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.76013158761425e-05, |
|
"loss": 0.815, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.758681925558756e-05, |
|
"loss": 0.6949, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.75722811822911e-05, |
|
"loss": 0.6397, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.755770168293432e-05, |
|
"loss": 0.8287, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7543080784274414e-05, |
|
"loss": 0.831, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.752841851314457e-05, |
|
"loss": 0.8885, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.751371489645391e-05, |
|
"loss": 0.7274, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7498969961187424e-05, |
|
"loss": 0.6892, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.748418373440594e-05, |
|
"loss": 0.7592, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.7469356243246066e-05, |
|
"loss": 0.7485, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.745448751492014e-05, |
|
"loss": 0.7914, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7439577576716175e-05, |
|
"loss": 0.9029, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.742462645599783e-05, |
|
"loss": 0.6633, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.740963418020433e-05, |
|
"loss": 0.7987, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.739460077685043e-05, |
|
"loss": 0.9075, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.737952627352639e-05, |
|
"loss": 0.7466, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.736441069789786e-05, |
|
"loss": 0.6737, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.734925407770589e-05, |
|
"loss": 0.7996, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.733405644076688e-05, |
|
"loss": 0.8677, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.731881781497246e-05, |
|
"loss": 0.8364, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7303538228289504e-05, |
|
"loss": 0.7339, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.728821770876008e-05, |
|
"loss": 0.7552, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.727285628450136e-05, |
|
"loss": 0.6216, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.725745398370558e-05, |
|
"loss": 0.7541, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.724201083464e-05, |
|
"loss": 0.7895, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.722652686564687e-05, |
|
"loss": 0.8574, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.721100210514333e-05, |
|
"loss": 0.7342, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.719543658162139e-05, |
|
"loss": 0.676, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.717983032364787e-05, |
|
"loss": 0.7245, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.716418335986434e-05, |
|
"loss": 0.7016, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7148495718987096e-05, |
|
"loss": 0.8397, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.7132767429807076e-05, |
|
"loss": 0.9152, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.711699852118981e-05, |
|
"loss": 0.7737, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.710118902207541e-05, |
|
"loss": 0.7912, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.708533896147842e-05, |
|
"loss": 0.711, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.706944836848789e-05, |
|
"loss": 0.9113, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7053517272267214e-05, |
|
"loss": 0.736, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.703754570205413e-05, |
|
"loss": 0.8455, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7021533687160676e-05, |
|
"loss": 0.9134, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.7005481256973097e-05, |
|
"loss": 0.9046, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.698938844095181e-05, |
|
"loss": 0.9463, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.6973255268631366e-05, |
|
"loss": 0.7204, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.6957081769620356e-05, |
|
"loss": 0.7333, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.694086797360141e-05, |
|
"loss": 0.8651, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.692461391033109e-05, |
|
"loss": 0.8249, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.690831960963988e-05, |
|
"loss": 0.6987, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.6891985101432084e-05, |
|
"loss": 0.6567, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.687561041568582e-05, |
|
"loss": 0.7595, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.685919558245293e-05, |
|
"loss": 0.8601, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.684274063185894e-05, |
|
"loss": 0.6274, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.682624559410301e-05, |
|
"loss": 0.6681, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.680971049945786e-05, |
|
"loss": 0.7932, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.679313537826973e-05, |
|
"loss": 0.7183, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.677652026095831e-05, |
|
"loss": 0.6897, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.675986517801672e-05, |
|
"loss": 0.7922, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.674317016001138e-05, |
|
"loss": 0.8051, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.672643523758206e-05, |
|
"loss": 0.6802, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6709660441441705e-05, |
|
"loss": 0.7451, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6692845802376494e-05, |
|
"loss": 0.8396, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.66759913512457e-05, |
|
"loss": 0.9847, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.665909711898164e-05, |
|
"loss": 0.8155, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.664216313658969e-05, |
|
"loss": 0.7643, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6625189435148146e-05, |
|
"loss": 0.7872, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6608176045808206e-05, |
|
"loss": 0.7877, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.659112299979389e-05, |
|
"loss": 0.5715, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.657403032840203e-05, |
|
"loss": 0.7811, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6556898063002164e-05, |
|
"loss": 0.5627, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.653972623503649e-05, |
|
"loss": 0.6729, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6522514876019816e-05, |
|
"loss": 0.6761, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6505264017539516e-05, |
|
"loss": 0.7337, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.648797369125543e-05, |
|
"loss": 0.7986, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.647064392889985e-05, |
|
"loss": 0.7348, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6453274762277435e-05, |
|
"loss": 0.8767, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.643586622326516e-05, |
|
"loss": 0.7493, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 0.7891781330108643, |
|
"eval_runtime": 10.3744, |
|
"eval_samples_per_second": 3.566, |
|
"eval_steps_per_second": 0.482, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.641841834381228e-05, |
|
"loss": 0.9408, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.640093115594022e-05, |
|
"loss": 0.7631, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.638340469174256e-05, |
|
"loss": 0.8535, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.636583898338497e-05, |
|
"loss": 0.7969, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.6348234063105124e-05, |
|
"loss": 0.6849, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.633058996321268e-05, |
|
"loss": 0.698, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.631290671608919e-05, |
|
"loss": 0.7716, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.629518435418805e-05, |
|
"loss": 0.7092, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.627742291003445e-05, |
|
"loss": 0.6969, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.62596224162253e-05, |
|
"loss": 0.6474, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.624178290542917e-05, |
|
"loss": 0.6569, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.622390441038627e-05, |
|
"loss": 0.8783, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.620598696390831e-05, |
|
"loss": 0.7998, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.61880305988785e-05, |
|
"loss": 0.7375, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.61700353482515e-05, |
|
"loss": 0.7961, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.615200124505331e-05, |
|
"loss": 0.7244, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.613392832238124e-05, |
|
"loss": 0.8139, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.611581661340386e-05, |
|
"loss": 0.7845, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.609766615136089e-05, |
|
"loss": 0.7492, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.607947696956321e-05, |
|
"loss": 0.7373, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.606124910139272e-05, |
|
"loss": 0.7485, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.604298258030237e-05, |
|
"loss": 0.568, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.6024677439816e-05, |
|
"loss": 0.8351, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.600633371352836e-05, |
|
"loss": 0.6451, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.598795143510499e-05, |
|
"loss": 0.7567, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.59695306382822e-05, |
|
"loss": 0.728, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.595107135686699e-05, |
|
"loss": 0.8118, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.5932573624737e-05, |
|
"loss": 0.7179, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.5914037475840385e-05, |
|
"loss": 1.0522, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.589546294419589e-05, |
|
"loss": 0.8786, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.587685006389262e-05, |
|
"loss": 0.6876, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.58581988690901e-05, |
|
"loss": 0.6063, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.583950939401819e-05, |
|
"loss": 0.7466, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5820781672976955e-05, |
|
"loss": 0.7848, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.58020157403367e-05, |
|
"loss": 0.837, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.578321163053783e-05, |
|
"loss": 0.7999, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.576436937809081e-05, |
|
"loss": 0.7283, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5745489017576124e-05, |
|
"loss": 0.8953, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5726570583644205e-05, |
|
"loss": 0.7776, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.570761411101533e-05, |
|
"loss": 0.7233, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5688619634479604e-05, |
|
"loss": 0.8226, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.566958718889688e-05, |
|
"loss": 0.6558, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.565051680919669e-05, |
|
"loss": 0.7776, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.563140853037817e-05, |
|
"loss": 0.6725, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.561226238751005e-05, |
|
"loss": 0.7942, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5593078415730515e-05, |
|
"loss": 0.7764, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.55738566502472e-05, |
|
"loss": 0.9007, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.5554597126337076e-05, |
|
"loss": 0.7221, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.553529987934643e-05, |
|
"loss": 0.809, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.551596494469077e-05, |
|
"loss": 0.7276, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.549659235785478e-05, |
|
"loss": 0.7519, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.547718215439224e-05, |
|
"loss": 0.7447, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.545773436992598e-05, |
|
"loss": 0.6342, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.543824904014777e-05, |
|
"loss": 0.6875, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5418726200818316e-05, |
|
"loss": 0.7269, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.539916588776715e-05, |
|
"loss": 0.7012, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5379568136892567e-05, |
|
"loss": 0.8766, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5359932984161605e-05, |
|
"loss": 0.7997, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.534026046560992e-05, |
|
"loss": 0.709, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5320550617341726e-05, |
|
"loss": 0.7729, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.530080347552979e-05, |
|
"loss": 0.7965, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.528101907641529e-05, |
|
"loss": 0.8593, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5261197456307794e-05, |
|
"loss": 0.7322, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.524133865158518e-05, |
|
"loss": 0.7928, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.522144269869355e-05, |
|
"loss": 0.7176, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5201509634147224e-05, |
|
"loss": 0.7317, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.518153949452858e-05, |
|
"loss": 0.8163, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.51615323164881e-05, |
|
"loss": 0.7572, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5141488136744174e-05, |
|
"loss": 0.7409, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.512140699208315e-05, |
|
"loss": 0.8279, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.51012889193592e-05, |
|
"loss": 0.6798, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.508113395549426e-05, |
|
"loss": 0.6618, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.506094213747798e-05, |
|
"loss": 0.8773, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.504071350236766e-05, |
|
"loss": 0.8261, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.5020448087288126e-05, |
|
"loss": 0.6215, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.5000145929431756e-05, |
|
"loss": 0.8396, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.497980706605833e-05, |
|
"loss": 0.7632, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.495943153449501e-05, |
|
"loss": 0.7404, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.4939019372136224e-05, |
|
"loss": 0.8391, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.491857061644367e-05, |
|
"loss": 0.7786, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.489808530494617e-05, |
|
"loss": 0.7974, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.487756347523965e-05, |
|
"loss": 0.8695, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.4857005164987044e-05, |
|
"loss": 0.6575, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.483641041191826e-05, |
|
"loss": 0.8302, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.481577925383007e-05, |
|
"loss": 0.8316, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.479511172858607e-05, |
|
"loss": 0.8911, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.4774407874116584e-05, |
|
"loss": 0.7963, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.475366772841862e-05, |
|
"loss": 0.7823, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.47328913295558e-05, |
|
"loss": 0.8895, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.4712078715658254e-05, |
|
"loss": 0.7182, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.46912299249226e-05, |
|
"loss": 0.8522, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.467034499561185e-05, |
|
"loss": 0.7836, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.46494239660553e-05, |
|
"loss": 0.6369, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.462846687464857e-05, |
|
"loss": 0.7994, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.4607473759853384e-05, |
|
"loss": 0.7834, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.458644466019764e-05, |
|
"loss": 0.741, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.456537961427526e-05, |
|
"loss": 0.7608, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 4.454427866074612e-05, |
|
"loss": 0.8057, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.452314183833601e-05, |
|
"loss": 0.8335, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.450196918583655e-05, |
|
"loss": 0.7366, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"eval_loss": 0.7860936522483826, |
|
"eval_runtime": 10.1703, |
|
"eval_samples_per_second": 3.638, |
|
"eval_steps_per_second": 0.492, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.448076074210511e-05, |
|
"loss": 0.703, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.445951654606476e-05, |
|
"loss": 0.8081, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.443823663670416e-05, |
|
"loss": 0.5891, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.441692105307754e-05, |
|
"loss": 0.7695, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.43955698343046e-05, |
|
"loss": 0.7281, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4374183019570404e-05, |
|
"loss": 0.8165, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.435276064812539e-05, |
|
"loss": 0.9928, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4331302759285225e-05, |
|
"loss": 0.698, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4309809392430776e-05, |
|
"loss": 0.6933, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4288280587007994e-05, |
|
"loss": 0.7442, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4266716382527904e-05, |
|
"loss": 0.9552, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.424511681856648e-05, |
|
"loss": 0.7013, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.422348193476458e-05, |
|
"loss": 0.648, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4201811770827905e-05, |
|
"loss": 0.6661, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.418010636652689e-05, |
|
"loss": 0.8251, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.415836576169664e-05, |
|
"loss": 0.6331, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.413658999623689e-05, |
|
"loss": 0.79, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.4114779110111866e-05, |
|
"loss": 0.7234, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.409293314335028e-05, |
|
"loss": 0.8827, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.40710521360452e-05, |
|
"loss": 0.7483, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.404913612835404e-05, |
|
"loss": 0.8601, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 4.402718516049842e-05, |
|
"loss": 0.7445, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.4005199272764105e-05, |
|
"loss": 0.8318, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.3983178505501e-05, |
|
"loss": 0.8368, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.3961122899122975e-05, |
|
"loss": 0.7101, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.3939032494107855e-05, |
|
"loss": 0.7236, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.391690733099735e-05, |
|
"loss": 0.7242, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.389474745039691e-05, |
|
"loss": 0.7646, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.387255289297576e-05, |
|
"loss": 0.7416, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.385032369946673e-05, |
|
"loss": 0.8072, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.382805991066622e-05, |
|
"loss": 0.7637, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.380576156743412e-05, |
|
"loss": 0.8242, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.378342871069376e-05, |
|
"loss": 0.7322, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.376106138143178e-05, |
|
"loss": 0.7279, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.373865962069811e-05, |
|
"loss": 0.8358, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.371622346960584e-05, |
|
"loss": 0.8739, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.3693752969331224e-05, |
|
"loss": 0.6559, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.367124816111351e-05, |
|
"loss": 0.7295, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.364870908625493e-05, |
|
"loss": 0.8219, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.36261357861206e-05, |
|
"loss": 0.7993, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.360352830213846e-05, |
|
"loss": 0.7536, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.358088667579917e-05, |
|
"loss": 0.8467, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.355821094865606e-05, |
|
"loss": 0.8671, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.3535501162325034e-05, |
|
"loss": 0.7898, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 4.351275735848451e-05, |
|
"loss": 0.8136, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.348997957887535e-05, |
|
"loss": 0.8536, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.346716786530075e-05, |
|
"loss": 0.9403, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.344432225962619e-05, |
|
"loss": 0.7021, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.342144280377936e-05, |
|
"loss": 0.7304, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.339852953975005e-05, |
|
"loss": 0.7546, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.337558250959013e-05, |
|
"loss": 0.7948, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.335260175541342e-05, |
|
"loss": 0.8696, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.332958731939562e-05, |
|
"loss": 0.8357, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.330653924377428e-05, |
|
"loss": 0.7318, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.328345757084866e-05, |
|
"loss": 0.7246, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.32603423429797e-05, |
|
"loss": 0.7, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3237193602589885e-05, |
|
"loss": 0.7644, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3214011392163244e-05, |
|
"loss": 0.7991, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.319079575424523e-05, |
|
"loss": 0.8297, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3167546731442605e-05, |
|
"loss": 0.6975, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3144264366423445e-05, |
|
"loss": 0.7131, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3120948701917e-05, |
|
"loss": 0.7741, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3097599780713625e-05, |
|
"loss": 0.8072, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.307421764566473e-05, |
|
"loss": 0.7055, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3050802339682665e-05, |
|
"loss": 0.8643, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.302735390574066e-05, |
|
"loss": 0.6969, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.3003872386872744e-05, |
|
"loss": 0.7327, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 4.2980357826173665e-05, |
|
"loss": 0.8235, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.295681026679883e-05, |
|
"loss": 0.7095, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2933229751964175e-05, |
|
"loss": 0.7745, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.290961632494614e-05, |
|
"loss": 0.7294, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2885970029081555e-05, |
|
"loss": 0.7303, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2862290907767585e-05, |
|
"loss": 0.7418, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.283857900446163e-05, |
|
"loss": 0.8628, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2814834362681255e-05, |
|
"loss": 0.7715, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.279105702600412e-05, |
|
"loss": 0.675, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.276724703806786e-05, |
|
"loss": 0.7764, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2743404442570065e-05, |
|
"loss": 0.6354, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2719529283268156e-05, |
|
"loss": 0.8398, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.269562160397931e-05, |
|
"loss": 0.836, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2671681448580404e-05, |
|
"loss": 0.6738, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.264770886100789e-05, |
|
"loss": 0.7566, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2623703885257774e-05, |
|
"loss": 0.8301, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.259966656538548e-05, |
|
"loss": 0.7379, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2575596945505794e-05, |
|
"loss": 0.6814, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.255149506979279e-05, |
|
"loss": 0.7451, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.2527360982479735e-05, |
|
"loss": 0.6818, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.250319472785902e-05, |
|
"loss": 0.8787, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.247899635028206e-05, |
|
"loss": 0.7882, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.245476589415923e-05, |
|
"loss": 0.7852, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 4.243050340395977e-05, |
|
"loss": 0.8433, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.240620892421172e-05, |
|
"loss": 0.8116, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.238188249950185e-05, |
|
"loss": 0.7848, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.235752417447549e-05, |
|
"loss": 0.8715, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.233313399383659e-05, |
|
"loss": 0.6482, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.230871200234754e-05, |
|
"loss": 0.6538, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2284258244829076e-05, |
|
"loss": 0.7536, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2259772766160276e-05, |
|
"loss": 0.7742, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2235255611278425e-05, |
|
"loss": 0.7807, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.221070682517894e-05, |
|
"loss": 0.7651, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"eval_loss": 0.7837547063827515, |
|
"eval_runtime": 10.1825, |
|
"eval_samples_per_second": 3.634, |
|
"eval_steps_per_second": 0.491, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2186126452915256e-05, |
|
"loss": 0.8788, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.216151453959885e-05, |
|
"loss": 0.7653, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2136871130399015e-05, |
|
"loss": 0.8086, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.211219627054288e-05, |
|
"loss": 0.6593, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.2087490005315293e-05, |
|
"loss": 0.864, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.206275238005873e-05, |
|
"loss": 0.7781, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.203798344017324e-05, |
|
"loss": 0.8283, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.201318323111631e-05, |
|
"loss": 0.8298, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.1988351798402846e-05, |
|
"loss": 0.7659, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.1963489187605034e-05, |
|
"loss": 0.8664, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.193859544435229e-05, |
|
"loss": 0.7306, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.191367061433118e-05, |
|
"loss": 0.7797, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.18887147432853e-05, |
|
"loss": 0.6415, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 4.186372787701522e-05, |
|
"loss": 0.8717, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.183871006137841e-05, |
|
"loss": 0.7534, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.181366134228912e-05, |
|
"loss": 0.7723, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.1788581765718316e-05, |
|
"loss": 0.7216, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.176347137769362e-05, |
|
"loss": 0.8546, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.1738330224299165e-05, |
|
"loss": 0.7798, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.171315835167558e-05, |
|
"loss": 0.7227, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.1687955806019864e-05, |
|
"loss": 0.6509, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.166272263358528e-05, |
|
"loss": 0.8394, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.163745888068134e-05, |
|
"loss": 0.7652, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.161216459367366e-05, |
|
"loss": 0.7998, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.158683981898389e-05, |
|
"loss": 0.801, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.156148460308964e-05, |
|
"loss": 0.6806, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.153609899252439e-05, |
|
"loss": 0.7841, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.15106830338774e-05, |
|
"loss": 0.7259, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.148523677379361e-05, |
|
"loss": 0.6297, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.14597602589736e-05, |
|
"loss": 0.8397, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.143425353617346e-05, |
|
"loss": 0.7199, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.1408716652204716e-05, |
|
"loss": 0.759, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.138314965393425e-05, |
|
"loss": 0.6946, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.135755258828422e-05, |
|
"loss": 0.7367, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.133192550223196e-05, |
|
"loss": 0.7487, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.130626844280989e-05, |
|
"loss": 0.7692, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.128058145710547e-05, |
|
"loss": 0.6799, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 4.125486459226104e-05, |
|
"loss": 0.7387, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.12291178954738e-05, |
|
"loss": 0.8323, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.12033414139957e-05, |
|
"loss": 0.7543, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.117753519513336e-05, |
|
"loss": 0.6675, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.115169928624794e-05, |
|
"loss": 0.7366, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.1125833734755114e-05, |
|
"loss": 0.7067, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.109993858812499e-05, |
|
"loss": 0.8237, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.107401389388193e-05, |
|
"loss": 0.7359, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.104805969960456e-05, |
|
"loss": 0.7633, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.1022076052925637e-05, |
|
"loss": 0.7281, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.099606300153198e-05, |
|
"loss": 0.7322, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.097002059316436e-05, |
|
"loss": 0.905, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0943948875617433e-05, |
|
"loss": 0.6952, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0917847896739644e-05, |
|
"loss": 0.7417, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.089171770443313e-05, |
|
"loss": 0.7173, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0865558346653664e-05, |
|
"loss": 0.8249, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0839369871410524e-05, |
|
"loss": 0.7726, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.081315232676646e-05, |
|
"loss": 0.7356, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0786905760837514e-05, |
|
"loss": 0.7732, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0760630221793036e-05, |
|
"loss": 0.9094, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.073432575785554e-05, |
|
"loss": 0.8432, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.0707992417300616e-05, |
|
"loss": 0.7868, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.068163024845686e-05, |
|
"loss": 0.8738, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 4.065523929970578e-05, |
|
"loss": 0.7762, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.062881961948167e-05, |
|
"loss": 0.9031, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.06023712562716e-05, |
|
"loss": 0.8335, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.057589425861525e-05, |
|
"loss": 0.7018, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.054938867510485e-05, |
|
"loss": 0.785, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.0522854554385116e-05, |
|
"loss": 0.7981, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.049629194515312e-05, |
|
"loss": 0.8843, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.046970089615822e-05, |
|
"loss": 0.737, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.044308145620197e-05, |
|
"loss": 0.8855, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.0416433674138024e-05, |
|
"loss": 0.7986, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.038975759887207e-05, |
|
"loss": 0.8071, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.0363053279361696e-05, |
|
"loss": 0.8288, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.033632076461634e-05, |
|
"loss": 0.8119, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.0309560103697184e-05, |
|
"loss": 0.7528, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.028277134571706e-05, |
|
"loss": 0.7865, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.025595453984038e-05, |
|
"loss": 0.7289, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.022910973528301e-05, |
|
"loss": 0.8222, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.020223698131221e-05, |
|
"loss": 0.8173, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.017533632724656e-05, |
|
"loss": 0.7644, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.0148407822455794e-05, |
|
"loss": 0.8409, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.012145151636079e-05, |
|
"loss": 0.665, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.009446745843346e-05, |
|
"loss": 0.8076, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.006745569819662e-05, |
|
"loss": 0.7214, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 4.004041628522394e-05, |
|
"loss": 0.7404, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 4.001334926913985e-05, |
|
"loss": 0.6325, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.998625469961942e-05, |
|
"loss": 0.7213, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9959132626388294e-05, |
|
"loss": 0.6753, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9931983099222595e-05, |
|
"loss": 0.8213, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.990480616794884e-05, |
|
"loss": 0.6437, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.987760188244382e-05, |
|
"loss": 0.8144, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.985037029263454e-05, |
|
"loss": 0.6676, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.982311144849813e-05, |
|
"loss": 0.7082, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.97958254000617e-05, |
|
"loss": 0.7795, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.976851219740234e-05, |
|
"loss": 0.88, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.974117189064691e-05, |
|
"loss": 0.7574, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.971380452997208e-05, |
|
"loss": 0.7663, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9686410165604124e-05, |
|
"loss": 0.6625, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.965898884781889e-05, |
|
"loss": 0.8158, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.963154062694169e-05, |
|
"loss": 0.7573, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9604065553347224e-05, |
|
"loss": 0.6229, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"eval_loss": 0.7801283001899719, |
|
"eval_runtime": 10.3197, |
|
"eval_samples_per_second": 3.585, |
|
"eval_steps_per_second": 0.485, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.957656367745947e-05, |
|
"loss": 0.6979, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.954903504975156e-05, |
|
"loss": 0.7313, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.952147972074578e-05, |
|
"loss": 0.723, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.949389774101337e-05, |
|
"loss": 0.7319, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.946628916117452e-05, |
|
"loss": 0.651, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.9438654031898204e-05, |
|
"loss": 1.0151, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.941099240390215e-05, |
|
"loss": 0.6635, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9383304327952696e-05, |
|
"loss": 0.7306, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.935558985486474e-05, |
|
"loss": 0.7589, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9327849035501604e-05, |
|
"loss": 0.7903, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9300081920774966e-05, |
|
"loss": 0.7878, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9272288561644786e-05, |
|
"loss": 0.8944, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9244469009119165e-05, |
|
"loss": 1.1365, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.921662331425429e-05, |
|
"loss": 0.747, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.918875152815431e-05, |
|
"loss": 0.8827, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.916085370197127e-05, |
|
"loss": 0.7942, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9132929886905e-05, |
|
"loss": 0.8337, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.910498013420304e-05, |
|
"loss": 0.7707, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.907700449516051e-05, |
|
"loss": 0.8093, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9049003021120055e-05, |
|
"loss": 0.7472, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.9020975763471734e-05, |
|
"loss": 0.9015, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.8992922773652915e-05, |
|
"loss": 0.6248, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.89648441031482e-05, |
|
"loss": 0.815, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.893673980348932e-05, |
|
"loss": 0.6683, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.890860992625504e-05, |
|
"loss": 0.7272, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.8880454523071064e-05, |
|
"loss": 0.7075, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.885227364560995e-05, |
|
"loss": 0.6867, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.882406734559101e-05, |
|
"loss": 0.7838, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.879583567478019e-05, |
|
"loss": 0.8641, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.876757868499003e-05, |
|
"loss": 0.6906, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.873929642807953e-05, |
|
"loss": 0.8569, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.871098895595404e-05, |
|
"loss": 0.7502, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8682656320565204e-05, |
|
"loss": 0.8316, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.865429857391084e-05, |
|
"loss": 0.7405, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8625915768034873e-05, |
|
"loss": 0.756, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.859750795502718e-05, |
|
"loss": 0.7492, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8569075187023575e-05, |
|
"loss": 0.8525, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8540617516205624e-05, |
|
"loss": 0.9735, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.851213499480064e-05, |
|
"loss": 0.7838, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8483627675081524e-05, |
|
"loss": 0.9609, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8455095609366693e-05, |
|
"loss": 0.8497, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8426538850019964e-05, |
|
"loss": 0.7637, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.83979574494505e-05, |
|
"loss": 0.6037, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.836935146011266e-05, |
|
"loss": 0.8975, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.834072093450594e-05, |
|
"loss": 0.6858, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.831206592517488e-05, |
|
"loss": 0.8235, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.828338648470895e-05, |
|
"loss": 0.7814, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.825468266574243e-05, |
|
"loss": 0.7573, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.8225954520954364e-05, |
|
"loss": 0.755, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.819720210306845e-05, |
|
"loss": 0.7047, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.816842546485292e-05, |
|
"loss": 0.7636, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.813962465912045e-05, |
|
"loss": 0.8064, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.811079973872809e-05, |
|
"loss": 0.8318, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.808195075657711e-05, |
|
"loss": 0.9713, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.805307776561299e-05, |
|
"loss": 0.7643, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.802418081882524e-05, |
|
"loss": 0.7808, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.799525996924733e-05, |
|
"loss": 0.7491, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.796631526995663e-05, |
|
"loss": 0.8423, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.793734677407423e-05, |
|
"loss": 0.7888, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.790835453476495e-05, |
|
"loss": 0.77, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7879338605237146e-05, |
|
"loss": 0.7197, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7850299038742673e-05, |
|
"loss": 0.7985, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.782123588857675e-05, |
|
"loss": 0.8538, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.779214920807788e-05, |
|
"loss": 0.7602, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7763039050627767e-05, |
|
"loss": 0.8949, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.773390546965119e-05, |
|
"loss": 0.7133, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7704748518615915e-05, |
|
"loss": 0.854, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.767556825103259e-05, |
|
"loss": 0.6997, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7646364720454675e-05, |
|
"loss": 0.9395, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.761713798047831e-05, |
|
"loss": 0.6775, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.758788808474224e-05, |
|
"loss": 0.8521, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.755861508692769e-05, |
|
"loss": 0.8262, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.75293190407583e-05, |
|
"loss": 0.7014, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.7500000000000003e-05, |
|
"loss": 0.6883, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.747065801846093e-05, |
|
"loss": 0.8857, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.744129314999133e-05, |
|
"loss": 0.7464, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.741190544848343e-05, |
|
"loss": 0.7916, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.7382494967871386e-05, |
|
"loss": 0.7719, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.735306176213114e-05, |
|
"loss": 0.6991, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.732360588528036e-05, |
|
"loss": 0.6965, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.72941273913783e-05, |
|
"loss": 0.8097, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.726462633452574e-05, |
|
"loss": 0.7607, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.723510276886487e-05, |
|
"loss": 0.7492, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.720555674857916e-05, |
|
"loss": 0.7734, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.7175988327893317e-05, |
|
"loss": 0.8125, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.714639756107317e-05, |
|
"loss": 0.7121, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.711678450242554e-05, |
|
"loss": 0.6998, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.7087149206298154e-05, |
|
"loss": 0.7277, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.7057491727079554e-05, |
|
"loss": 0.6993, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.702781211919901e-05, |
|
"loss": 0.7536, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6998110437126384e-05, |
|
"loss": 0.7398, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6968386735372055e-05, |
|
"loss": 0.7772, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6938641068486826e-05, |
|
"loss": 0.6984, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.69088734910618e-05, |
|
"loss": 0.6349, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6879084057728304e-05, |
|
"loss": 0.7508, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.684927282315774e-05, |
|
"loss": 0.7468, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.681943984206158e-05, |
|
"loss": 0.655, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.678958516919114e-05, |
|
"loss": 0.7449, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6759708859337614e-05, |
|
"loss": 0.9635, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.6729810967331856e-05, |
|
"loss": 0.8153, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_loss": 0.7773626446723938, |
|
"eval_runtime": 10.1729, |
|
"eval_samples_per_second": 3.637, |
|
"eval_steps_per_second": 0.492, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6699891548044336e-05, |
|
"loss": 0.8183, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6669950656385065e-05, |
|
"loss": 0.7709, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.663998834730341e-05, |
|
"loss": 0.6341, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.66100046757881e-05, |
|
"loss": 0.826, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.657999969686702e-05, |
|
"loss": 0.7063, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.654997346560719e-05, |
|
"loss": 0.6664, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.651992603711462e-05, |
|
"loss": 0.8019, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.648985746653424e-05, |
|
"loss": 0.7915, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.645976780904975e-05, |
|
"loss": 0.776, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6429657119883585e-05, |
|
"loss": 0.8638, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.639952545429676e-05, |
|
"loss": 0.7179, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.636937286758878e-05, |
|
"loss": 0.705, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.633919941509757e-05, |
|
"loss": 0.8691, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.630900515219932e-05, |
|
"loss": 0.792, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6278790134308446e-05, |
|
"loss": 0.8007, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6248554416877414e-05, |
|
"loss": 0.8518, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.621829805539671e-05, |
|
"loss": 0.7176, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.618802110539471e-05, |
|
"loss": 0.6979, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6157723622437544e-05, |
|
"loss": 0.7744, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.612740566212906e-05, |
|
"loss": 0.7161, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.6097067280110665e-05, |
|
"loss": 0.8242, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.606670853206125e-05, |
|
"loss": 0.7249, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 3.60363294736971e-05, |
|
"loss": 0.7814, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.6005930160771755e-05, |
|
"loss": 0.74, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.5975510649075915e-05, |
|
"loss": 0.8421, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.594507099443739e-05, |
|
"loss": 0.7091, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.591461125272093e-05, |
|
"loss": 0.8217, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.5884131479828155e-05, |
|
"loss": 0.797, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.585363173169744e-05, |
|
"loss": 0.8585, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.582311206430383e-05, |
|
"loss": 0.7903, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.579257253365894e-05, |
|
"loss": 0.9192, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.57620131958108e-05, |
|
"loss": 0.8264, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.5731434106843836e-05, |
|
"loss": 1.0079, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.57008353228787e-05, |
|
"loss": 0.7485, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.567021690007218e-05, |
|
"loss": 0.7599, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.563957889461713e-05, |
|
"loss": 0.6383, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.560892136274232e-05, |
|
"loss": 0.6461, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.5578244360712373e-05, |
|
"loss": 0.7783, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.5547547944827644e-05, |
|
"loss": 0.7189, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.55168321714241e-05, |
|
"loss": 0.7878, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.548609709687326e-05, |
|
"loss": 0.8456, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.545534277758205e-05, |
|
"loss": 0.8697, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.542456926999271e-05, |
|
"loss": 0.9097, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.539377663058271e-05, |
|
"loss": 0.7874, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.536296491586463e-05, |
|
"loss": 0.8056, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.533213418238605e-05, |
|
"loss": 0.7522, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.530128448672944e-05, |
|
"loss": 0.7501, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.527041588551212e-05, |
|
"loss": 0.7787, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.523952843538604e-05, |
|
"loss": 0.7066, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.520862219303781e-05, |
|
"loss": 0.7545, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.5177697215188475e-05, |
|
"loss": 0.8294, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.51467535585935e-05, |
|
"loss": 0.7757, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.5115791280042606e-05, |
|
"loss": 0.7384, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.508481043635971e-05, |
|
"loss": 0.8955, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.505381108440279e-05, |
|
"loss": 0.6803, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.502279328106377e-05, |
|
"loss": 0.8034, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.49917570832685e-05, |
|
"loss": 0.8621, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.496070254797652e-05, |
|
"loss": 0.782, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.4929629732181064e-05, |
|
"loss": 0.7774, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.489853869290889e-05, |
|
"loss": 0.7672, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.486742948722024e-05, |
|
"loss": 0.7518, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.4836302172208645e-05, |
|
"loss": 0.9012, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.480515680500092e-05, |
|
"loss": 0.8715, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.477399344275698e-05, |
|
"loss": 0.7544, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.474281214266977e-05, |
|
"loss": 0.6645, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.471161296196516e-05, |
|
"loss": 0.9114, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.468039595790184e-05, |
|
"loss": 0.7907, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.4649161187771206e-05, |
|
"loss": 0.9009, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.461790870889725e-05, |
|
"loss": 0.6086, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 3.458663857863649e-05, |
|
"loss": 0.8777, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.455535085437781e-05, |
|
"loss": 0.6795, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4524045593542386e-05, |
|
"loss": 0.6594, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.449272285358361e-05, |
|
"loss": 0.6547, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.446138269198692e-05, |
|
"loss": 0.8464, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4430025166269744e-05, |
|
"loss": 0.7462, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4398650333981354e-05, |
|
"loss": 0.8541, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.436725825270283e-05, |
|
"loss": 0.8847, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.433584898004685e-05, |
|
"loss": 0.6683, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.43044225736577e-05, |
|
"loss": 0.7771, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.427297909121107e-05, |
|
"loss": 0.7601, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4241518590414e-05, |
|
"loss": 0.8268, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.421004112900477e-05, |
|
"loss": 0.7158, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.417854676475279e-05, |
|
"loss": 0.7148, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4147035555458477e-05, |
|
"loss": 0.7187, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4115507558953174e-05, |
|
"loss": 0.8365, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.408396283309903e-05, |
|
"loss": 0.7431, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4052401435788885e-05, |
|
"loss": 0.7332, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.4020823424946205e-05, |
|
"loss": 0.8885, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.3989228858524914e-05, |
|
"loss": 0.6457, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.3957617794509344e-05, |
|
"loss": 0.7648, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.392599029091409e-05, |
|
"loss": 1.0406, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.389434640578392e-05, |
|
"loss": 0.7927, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 3.386268619719366e-05, |
|
"loss": 0.8087, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.383100972324812e-05, |
|
"loss": 0.7671, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.379931704208193e-05, |
|
"loss": 0.8655, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3767608211859495e-05, |
|
"loss": 0.7414, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.373588329077482e-05, |
|
"loss": 0.7659, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.370414233705147e-05, |
|
"loss": 0.6748, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.367238540894243e-05, |
|
"loss": 0.6292, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3640612564729996e-05, |
|
"loss": 0.6958, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"eval_loss": 0.775384783744812, |
|
"eval_runtime": 10.201, |
|
"eval_samples_per_second": 3.627, |
|
"eval_steps_per_second": 0.49, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3608823862725675e-05, |
|
"loss": 0.6352, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.357701936127008e-05, |
|
"loss": 0.7488, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.354519911873282e-05, |
|
"loss": 0.6704, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.351336319351238e-05, |
|
"loss": 0.8388, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.348151164403605e-05, |
|
"loss": 0.7291, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.344964452875978e-05, |
|
"loss": 0.7755, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.34177619061681e-05, |
|
"loss": 0.8571, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3385863834773975e-05, |
|
"loss": 0.767, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.335395037311874e-05, |
|
"loss": 0.8152, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.332202157977198e-05, |
|
"loss": 0.6353, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.329007751333142e-05, |
|
"loss": 0.9138, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3258118232422787e-05, |
|
"loss": 0.8154, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.322614379569975e-05, |
|
"loss": 0.7702, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.319415426184381e-05, |
|
"loss": 0.7814, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3162149689564135e-05, |
|
"loss": 0.788, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 3.3130130137597546e-05, |
|
"loss": 0.932, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.309809566470829e-05, |
|
"loss": 0.8445, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.3066046329688056e-05, |
|
"loss": 0.8815, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.303398219135578e-05, |
|
"loss": 0.861, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.300190330855756e-05, |
|
"loss": 0.7331, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2969809740166583e-05, |
|
"loss": 0.7393, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.293770154508296e-05, |
|
"loss": 0.8168, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2905578782233665e-05, |
|
"loss": 0.8113, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.287344151057239e-05, |
|
"loss": 0.7994, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2841289789079465e-05, |
|
"loss": 0.7081, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.280912367676173e-05, |
|
"loss": 0.6252, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2776943232652465e-05, |
|
"loss": 0.7764, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.27447485158112e-05, |
|
"loss": 0.7369, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.271253958532372e-05, |
|
"loss": 0.7788, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.268031650030186e-05, |
|
"loss": 0.7606, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.264807931988342e-05, |
|
"loss": 0.9283, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.261582810323211e-05, |
|
"loss": 0.7162, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.258356290953736e-05, |
|
"loss": 0.8287, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2551283798014285e-05, |
|
"loss": 0.7492, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.25189908279035e-05, |
|
"loss": 0.7372, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.248668405847111e-05, |
|
"loss": 0.6945, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.24543635490085e-05, |
|
"loss": 0.6628, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.24220293588323e-05, |
|
"loss": 0.7582, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 3.2389681547284224e-05, |
|
"loss": 0.8779, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.2357320173731e-05, |
|
"loss": 0.6828, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.2324945297564236e-05, |
|
"loss": 0.7707, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.229255697820034e-05, |
|
"loss": 0.8004, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.2260155275080363e-05, |
|
"loss": 0.6871, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.222774024766995e-05, |
|
"loss": 0.9631, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.219531195545918e-05, |
|
"loss": 0.7378, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.216287045796247e-05, |
|
"loss": 0.8937, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.213041581471849e-05, |
|
"loss": 0.7671, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.209794808529004e-05, |
|
"loss": 0.6912, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.20654673292639e-05, |
|
"loss": 0.8395, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.2032973606250804e-05, |
|
"loss": 0.8311, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.200046697588526e-05, |
|
"loss": 0.7332, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.196794749782544e-05, |
|
"loss": 0.6453, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.1935415231753155e-05, |
|
"loss": 0.7365, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.190287023737364e-05, |
|
"loss": 0.8021, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.187031257441549e-05, |
|
"loss": 0.7917, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.183774230263058e-05, |
|
"loss": 0.813, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.18051594817939e-05, |
|
"loss": 0.768, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.1772564171703475e-05, |
|
"loss": 0.7123, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.173995643218025e-05, |
|
"loss": 0.836, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.1707336323068e-05, |
|
"loss": 0.7068, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.167470390423317e-05, |
|
"loss": 0.6678, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.164205923556483e-05, |
|
"loss": 0.699, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 3.1609402376974495e-05, |
|
"loss": 0.8364, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.157673338839609e-05, |
|
"loss": 0.6276, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.154405232978577e-05, |
|
"loss": 0.627, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.151135926112187e-05, |
|
"loss": 0.6027, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.147865424240474e-05, |
|
"loss": 0.6496, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.144593733365667e-05, |
|
"loss": 0.7317, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.14132085949218e-05, |
|
"loss": 0.7783, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.138046808626591e-05, |
|
"loss": 0.7837, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.134771586777647e-05, |
|
"loss": 0.7783, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.131495199956237e-05, |
|
"loss": 0.7335, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.128217654175393e-05, |
|
"loss": 0.7112, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.124938955450269e-05, |
|
"loss": 0.8552, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.1216591097981394e-05, |
|
"loss": 0.7491, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.118378123238383e-05, |
|
"loss": 0.8029, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.1150960017924685e-05, |
|
"loss": 0.6775, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.111812751483952e-05, |
|
"loss": 0.8973, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.1085283783384586e-05, |
|
"loss": 0.7634, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.1052428883836764e-05, |
|
"loss": 0.8199, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.1019562876493397e-05, |
|
"loss": 0.6868, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.0986685821672264e-05, |
|
"loss": 0.6868, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.095379777971135e-05, |
|
"loss": 0.8666, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.092089881096888e-05, |
|
"loss": 0.6998, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.088798897582308e-05, |
|
"loss": 0.6144, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 3.085506833467214e-05, |
|
"loss": 0.9385, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.082213694793407e-05, |
|
"loss": 0.7249, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0789194876046614e-05, |
|
"loss": 0.8582, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.075624217946712e-05, |
|
"loss": 0.7414, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.072327891867243e-05, |
|
"loss": 0.8248, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.069030515415878e-05, |
|
"loss": 0.7254, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0657320946441686e-05, |
|
"loss": 0.6965, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0624326356055824e-05, |
|
"loss": 0.7979, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.059132144355494e-05, |
|
"loss": 0.6902, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0558306269511686e-05, |
|
"loss": 0.7549, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.052528089451759e-05, |
|
"loss": 0.757, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0492245379182877e-05, |
|
"loss": 0.7158, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.045919978413639e-05, |
|
"loss": 0.7439, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0426144170025456e-05, |
|
"loss": 0.7263, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0393078597515827e-05, |
|
"loss": 0.8714, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"eval_loss": 0.7731763124465942, |
|
"eval_runtime": 10.1599, |
|
"eval_samples_per_second": 3.642, |
|
"eval_steps_per_second": 0.492, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0360003127291494e-05, |
|
"loss": 0.7357, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0326917820054622e-05, |
|
"loss": 0.6703, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.029382273652544e-05, |
|
"loss": 0.7843, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0260717937442113e-05, |
|
"loss": 0.8579, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.022760348356063e-05, |
|
"loss": 0.8031, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.019447943565471e-05, |
|
"loss": 0.763, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0161345854515672e-05, |
|
"loss": 0.9019, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.012820280095234e-05, |
|
"loss": 0.7761, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 3.0095050335790903e-05, |
|
"loss": 0.6707, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.006188851987485e-05, |
|
"loss": 0.7175, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 3.0028717414064805e-05, |
|
"loss": 0.8008, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9995537079238467e-05, |
|
"loss": 0.7714, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9962347576290444e-05, |
|
"loss": 0.6841, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.99291489661322e-05, |
|
"loss": 0.8408, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9895941309691883e-05, |
|
"loss": 0.8279, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9862724667914265e-05, |
|
"loss": 0.7386, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.982949910176061e-05, |
|
"loss": 0.7823, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9796264672208535e-05, |
|
"loss": 0.7185, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9763021440251954e-05, |
|
"loss": 0.7604, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9729769466900918e-05, |
|
"loss": 0.8062, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9696508813181534e-05, |
|
"loss": 0.6379, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9663239540135818e-05, |
|
"loss": 0.6967, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9629961708821618e-05, |
|
"loss": 0.695, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9596675380312507e-05, |
|
"loss": 0.8031, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9563380615697605e-05, |
|
"loss": 0.7674, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.953007747608158e-05, |
|
"loss": 0.8916, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9496766022584398e-05, |
|
"loss": 0.8159, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9463446316341346e-05, |
|
"loss": 0.7979, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.943011841850281e-05, |
|
"loss": 0.729, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9396782390234245e-05, |
|
"loss": 0.9, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.9363438292716004e-05, |
|
"loss": 0.8511, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 2.933008618714327e-05, |
|
"loss": 0.923, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.9296726134725887e-05, |
|
"loss": 0.8954, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.926335819668832e-05, |
|
"loss": 0.8655, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.922998243426951e-05, |
|
"loss": 0.8657, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.919659890872271e-05, |
|
"loss": 0.8267, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.9163207681315476e-05, |
|
"loss": 0.6383, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.912980881332945e-05, |
|
"loss": 0.7495, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.909640236606035e-05, |
|
"loss": 0.748, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.906298840081775e-05, |
|
"loss": 0.8319, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.9029566978925055e-05, |
|
"loss": 0.8384, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.899613816171935e-05, |
|
"loss": 0.8418, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8962702010551285e-05, |
|
"loss": 0.6754, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8929258586784974e-05, |
|
"loss": 0.8605, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8895807951797866e-05, |
|
"loss": 0.7574, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8862350166980673e-05, |
|
"loss": 0.6856, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8828885293737206e-05, |
|
"loss": 0.676, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8795413393484288e-05, |
|
"loss": 0.7304, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8761934527651642e-05, |
|
"loss": 0.8223, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8728448757681775e-05, |
|
"loss": 0.9364, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.869495614502986e-05, |
|
"loss": 0.8179, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8661456751163633e-05, |
|
"loss": 0.8226, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8627950637563283e-05, |
|
"loss": 0.8599, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8594437865721303e-05, |
|
"loss": 0.8034, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 2.8560918497142443e-05, |
|
"loss": 0.7842, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.852739259334354e-05, |
|
"loss": 0.8364, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8493860215853413e-05, |
|
"loss": 0.6798, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8460321426212793e-05, |
|
"loss": 0.6865, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.842677628597415e-05, |
|
"loss": 0.7019, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.839322485670163e-05, |
|
"loss": 0.769, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.83596671999709e-05, |
|
"loss": 0.7563, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8326103377369073e-05, |
|
"loss": 0.7969, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.829253345049458e-05, |
|
"loss": 0.7958, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.825895748095703e-05, |
|
"loss": 0.7038, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8225375530377162e-05, |
|
"loss": 0.7533, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8191787660386653e-05, |
|
"loss": 0.7245, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8158193932628068e-05, |
|
"loss": 0.6946, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.812459440875471e-05, |
|
"loss": 0.7737, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8090989150430523e-05, |
|
"loss": 0.6086, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.8057378219329983e-05, |
|
"loss": 0.7691, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.802376167713796e-05, |
|
"loss": 0.6517, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7990139585549646e-05, |
|
"loss": 0.7769, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7956512006270387e-05, |
|
"loss": 0.7431, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7922879001015632e-05, |
|
"loss": 0.7643, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.788924063151076e-05, |
|
"loss": 0.8355, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.7855596959491015e-05, |
|
"loss": 0.6971, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.782194804670136e-05, |
|
"loss": 0.8161, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.77882939548964e-05, |
|
"loss": 0.6974, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 2.77546347458402e-05, |
|
"loss": 0.7395, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7720970481306258e-05, |
|
"loss": 0.671, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7687301223077332e-05, |
|
"loss": 0.64, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7653627032945346e-05, |
|
"loss": 0.7831, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.761994797271129e-05, |
|
"loss": 0.755, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7586264104185055e-05, |
|
"loss": 0.656, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.75525754891854e-05, |
|
"loss": 0.7147, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7518882189539762e-05, |
|
"loss": 0.7379, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.74851842670842e-05, |
|
"loss": 0.6838, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7451481783663246e-05, |
|
"loss": 0.8398, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.741777480112979e-05, |
|
"loss": 0.7378, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7384063381345017e-05, |
|
"loss": 0.9643, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7350347586178194e-05, |
|
"loss": 0.8147, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7316627477506688e-05, |
|
"loss": 0.6773, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7282903117215724e-05, |
|
"loss": 0.7694, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.724917456719837e-05, |
|
"loss": 0.82, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7215441889355364e-05, |
|
"loss": 0.642, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.718170514559502e-05, |
|
"loss": 0.7938, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7147964397833127e-05, |
|
"loss": 0.8053, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7114219707992798e-05, |
|
"loss": 0.7755, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7080471138004405e-05, |
|
"loss": 0.7722, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7046718749805422e-05, |
|
"loss": 0.8225, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"eval_loss": 0.7722584009170532, |
|
"eval_runtime": 10.1954, |
|
"eval_samples_per_second": 3.629, |
|
"eval_steps_per_second": 0.49, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.7012962605340363e-05, |
|
"loss": 0.8115, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 2.6979202766560585e-05, |
|
"loss": 0.7637, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.694543929542427e-05, |
|
"loss": 0.7145, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6911672253896247e-05, |
|
"loss": 0.7826, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6877901703947895e-05, |
|
"loss": 0.7221, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6844127707557036e-05, |
|
"loss": 0.7527, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.681035032670782e-05, |
|
"loss": 0.8327, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6776569623390614e-05, |
|
"loss": 0.8694, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6742785659601866e-05, |
|
"loss": 1.0126, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6708998497344007e-05, |
|
"loss": 0.6732, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6675208198625367e-05, |
|
"loss": 0.907, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6641414825460004e-05, |
|
"loss": 0.7191, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6607618439867633e-05, |
|
"loss": 0.6305, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6573819103873475e-05, |
|
"loss": 0.7972, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6540016879508204e-05, |
|
"loss": 0.7695, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.650621182880776e-05, |
|
"loss": 0.745, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6472404013813283e-05, |
|
"loss": 0.6017, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6438593496571e-05, |
|
"loss": 0.6953, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6404780339132067e-05, |
|
"loss": 0.7556, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6370964603552517e-05, |
|
"loss": 0.675, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6337146351893087e-05, |
|
"loss": 0.844, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6303325646219157e-05, |
|
"loss": 0.7474, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.626950254860059e-05, |
|
"loss": 0.6728, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.623567712111165e-05, |
|
"loss": 0.6939, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 2.6201849425830877e-05, |
|
"loss": 0.8752, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6168019524840963e-05, |
|
"loss": 0.7942, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6134187480228666e-05, |
|
"loss": 0.7685, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.610035335408466e-05, |
|
"loss": 0.8832, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.6066517208503453e-05, |
|
"loss": 0.7798, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.603267910558324e-05, |
|
"loss": 0.7087, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5998839107425834e-05, |
|
"loss": 0.6909, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.596499727613651e-05, |
|
"loss": 0.6266, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.593115367382391e-05, |
|
"loss": 0.8294, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.589730836259992e-05, |
|
"loss": 0.8246, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5863461404579575e-05, |
|
"loss": 0.8762, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.582961286188094e-05, |
|
"loss": 0.7673, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.579576279662495e-05, |
|
"loss": 1.0006, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.576191127093538e-05, |
|
"loss": 0.7947, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5728058346938644e-05, |
|
"loss": 0.7919, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5694204086763766e-05, |
|
"loss": 0.7844, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5660348552542174e-05, |
|
"loss": 0.7466, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5626491806407683e-05, |
|
"loss": 0.7931, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5592633910496288e-05, |
|
"loss": 0.6076, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5558774926946127e-05, |
|
"loss": 0.7598, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5524914917897325e-05, |
|
"loss": 0.6587, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5491053945491866e-05, |
|
"loss": 0.7519, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.5457192071873546e-05, |
|
"loss": 0.7876, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 2.542332935918777e-05, |
|
"loss": 0.7594, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5389465869581512e-05, |
|
"loss": 0.8365, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5355601665203167e-05, |
|
"loss": 0.7155, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5321736808202433e-05, |
|
"loss": 0.8049, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.528787136073022e-05, |
|
"loss": 0.7455, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5254005384938496e-05, |
|
"loss": 0.7744, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5220138942980225e-05, |
|
"loss": 0.7778, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5186272097009216e-05, |
|
"loss": 0.8424, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5152404909180023e-05, |
|
"loss": 0.7876, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5118537441647815e-05, |
|
"loss": 0.7195, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5084669756568296e-05, |
|
"loss": 0.8405, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.505080191609755e-05, |
|
"loss": 0.5727, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.5016933982391954e-05, |
|
"loss": 0.7626, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4983066017608048e-05, |
|
"loss": 0.7927, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.494919808390246e-05, |
|
"loss": 0.9519, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4915330243431713e-05, |
|
"loss": 0.6401, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.488146255835219e-05, |
|
"loss": 0.7766, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.484759509081998e-05, |
|
"loss": 0.8009, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4813727902990786e-05, |
|
"loss": 0.676, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4779861057019784e-05, |
|
"loss": 0.7808, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.474599461506151e-05, |
|
"loss": 0.6557, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.471212863926979e-05, |
|
"loss": 0.7341, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.467826319179757e-05, |
|
"loss": 0.7867, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 2.4644398334796842e-05, |
|
"loss": 0.8283, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4610534130418487e-05, |
|
"loss": 0.8258, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.457667064081224e-05, |
|
"loss": 0.7862, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4542807928126466e-05, |
|
"loss": 0.6769, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4508946054508136e-05, |
|
"loss": 0.8143, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4475085082102685e-05, |
|
"loss": 0.8902, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4441225073053875e-05, |
|
"loss": 0.6589, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.440736608950372e-05, |
|
"loss": 0.7613, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4373508193592326e-05, |
|
"loss": 0.7117, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4339651447457832e-05, |
|
"loss": 0.7171, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4305795913236246e-05, |
|
"loss": 0.6979, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4271941653061355e-05, |
|
"loss": 0.724, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4238088729064627e-05, |
|
"loss": 0.871, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4204237203375056e-05, |
|
"loss": 0.8349, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4170387138119072e-05, |
|
"loss": 0.7212, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.413653859542042e-05, |
|
"loss": 0.7457, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4102691637400083e-05, |
|
"loss": 0.7321, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.4068846326176096e-05, |
|
"loss": 0.7771, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.403500272386349e-05, |
|
"loss": 0.8963, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.400116089257417e-05, |
|
"loss": 0.7219, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3967320894416765e-05, |
|
"loss": 0.7724, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3933482791496556e-05, |
|
"loss": 0.6775, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.389964664591534e-05, |
|
"loss": 0.8789, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3865812519771337e-05, |
|
"loss": 0.8096, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 2.3831980475159043e-05, |
|
"loss": 0.7285, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3798150574169135e-05, |
|
"loss": 0.6603, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3764322878888354e-05, |
|
"loss": 0.7982, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3730497451399417e-05, |
|
"loss": 0.8034, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3696674353780853e-05, |
|
"loss": 0.6944, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3662853648106916e-05, |
|
"loss": 0.638, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"eval_loss": 0.7718954682350159, |
|
"eval_runtime": 10.1807, |
|
"eval_samples_per_second": 3.634, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.362903539644749e-05, |
|
"loss": 0.7747, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.359521966086794e-05, |
|
"loss": 0.7324, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.356140650342901e-05, |
|
"loss": 0.7613, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.352759598618672e-05, |
|
"loss": 0.7137, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3493788171192242e-05, |
|
"loss": 0.6523, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3459983120491808e-05, |
|
"loss": 0.7382, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.342618089612652e-05, |
|
"loss": 0.7613, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3392381560132376e-05, |
|
"loss": 0.6524, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3358585174539998e-05, |
|
"loss": 0.8847, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.332479180137464e-05, |
|
"loss": 0.683, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3291001502655992e-05, |
|
"loss": 0.9041, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3257214340398143e-05, |
|
"loss": 0.6465, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.32234303766094e-05, |
|
"loss": 0.7058, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.318964967329218e-05, |
|
"loss": 0.7959, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.315587229244297e-05, |
|
"loss": 0.7706, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3122098296052114e-05, |
|
"loss": 0.8048, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3088327746103765e-05, |
|
"loss": 0.7886, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 2.3054560704575733e-05, |
|
"loss": 0.7493, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.302079723343942e-05, |
|
"loss": 0.8783, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2987037394659647e-05, |
|
"loss": 0.7896, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2953281250194573e-05, |
|
"loss": 0.8975, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.29195288619956e-05, |
|
"loss": 0.7124, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2885780292007208e-05, |
|
"loss": 0.7599, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2852035602166886e-05, |
|
"loss": 0.6605, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.281829485440498e-05, |
|
"loss": 0.8076, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.278455811064464e-05, |
|
"loss": 0.7865, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2750825432801633e-05, |
|
"loss": 0.7041, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2717096882784275e-05, |
|
"loss": 0.7624, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2683372522493318e-05, |
|
"loss": 0.8957, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2649652413821812e-05, |
|
"loss": 0.9792, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2615936618655e-05, |
|
"loss": 0.7144, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.258222519887021e-05, |
|
"loss": 0.723, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.254851821633676e-05, |
|
"loss": 0.8101, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2514815732915804e-05, |
|
"loss": 0.6864, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2481117810460247e-05, |
|
"loss": 0.9122, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2447424510814603e-05, |
|
"loss": 0.6056, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.241373589581495e-05, |
|
"loss": 0.8158, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2380052027288723e-05, |
|
"loss": 0.7081, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.2346372967054653e-05, |
|
"loss": 0.8738, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.231269877692267e-05, |
|
"loss": 0.6473, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.227902951869375e-05, |
|
"loss": 0.7287, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.2245365254159808e-05, |
|
"loss": 0.6395, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.221170604510361e-05, |
|
"loss": 0.7358, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.217805195329864e-05, |
|
"loss": 0.7528, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.214440304050899e-05, |
|
"loss": 0.6483, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.2110759368489242e-05, |
|
"loss": 0.632, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.2077120998984373e-05, |
|
"loss": 0.8643, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.204348799372962e-05, |
|
"loss": 0.7454, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.2009860414450363e-05, |
|
"loss": 0.8798, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1976238322862043e-05, |
|
"loss": 0.7703, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1942621780670023e-05, |
|
"loss": 0.7722, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1909010849569483e-05, |
|
"loss": 0.8465, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1875405591245292e-05, |
|
"loss": 0.7531, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1841806067371938e-05, |
|
"loss": 0.7937, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1808212339613352e-05, |
|
"loss": 0.7941, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1774624469622844e-05, |
|
"loss": 0.9063, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1741042519042968e-05, |
|
"loss": 0.705, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1707466549505427e-05, |
|
"loss": 0.7492, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1673896622630936e-05, |
|
"loss": 0.6582, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1640332800029105e-05, |
|
"loss": 0.7255, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1606775143298377e-05, |
|
"loss": 0.8809, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1573223714025852e-05, |
|
"loss": 0.8045, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1539678573787216e-05, |
|
"loss": 0.7698, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 2.1506139784146583e-05, |
|
"loss": 0.7583, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.147260740665647e-05, |
|
"loss": 0.7882, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.143908150285756e-05, |
|
"loss": 0.6991, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1405562134278703e-05, |
|
"loss": 0.811, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1372049362436723e-05, |
|
"loss": 0.746, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.133854324883637e-05, |
|
"loss": 0.7879, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.130504385497015e-05, |
|
"loss": 0.6192, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1271551242318224e-05, |
|
"loss": 0.7478, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.123806547234836e-05, |
|
"loss": 0.6913, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1204586606515718e-05, |
|
"loss": 0.7214, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.11711147062628e-05, |
|
"loss": 0.6534, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1137649833019326e-05, |
|
"loss": 0.6412, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.110419204820214e-05, |
|
"loss": 0.6778, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.107074141321504e-05, |
|
"loss": 0.8778, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.103729798944872e-05, |
|
"loss": 0.7754, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.100386183828065e-05, |
|
"loss": 0.8971, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.0970433021074947e-05, |
|
"loss": 0.9264, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.0937011599182264e-05, |
|
"loss": 0.6872, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.0903597633939654e-05, |
|
"loss": 0.9361, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.087019118667055e-05, |
|
"loss": 0.6723, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.0836792318684533e-05, |
|
"loss": 0.6403, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.080340109127729e-05, |
|
"loss": 0.6656, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.0770017565730493e-05, |
|
"loss": 0.7191, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.073664180331168e-05, |
|
"loss": 0.9078, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.070327386527412e-05, |
|
"loss": 0.7715, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.066991381285674e-05, |
|
"loss": 0.8872, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0636561707284e-05, |
|
"loss": 0.6862, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.060321760976576e-05, |
|
"loss": 0.7982, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0569881581497187e-05, |
|
"loss": 0.6791, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0536553683658656e-05, |
|
"loss": 0.7163, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0503233977415608e-05, |
|
"loss": 0.9087, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0469922523918432e-05, |
|
"loss": 0.8373, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.043661938430239e-05, |
|
"loss": 0.77, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0403324619687502e-05, |
|
"loss": 0.7141, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.037003829117839e-05, |
|
"loss": 0.7237, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0336760459864194e-05, |
|
"loss": 1.0225, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0303491186818475e-05, |
|
"loss": 0.8124, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"eval_loss": 0.7701284289360046, |
|
"eval_runtime": 10.1766, |
|
"eval_samples_per_second": 3.636, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0270230533099084e-05, |
|
"loss": 0.7718, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.023697855974805e-05, |
|
"loss": 0.7646, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0203735327791468e-05, |
|
"loss": 0.7243, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.01705008982394e-05, |
|
"loss": 0.8042, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.013727533208574e-05, |
|
"loss": 0.6866, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0104058690308126e-05, |
|
"loss": 0.7828, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0070851033867807e-05, |
|
"loss": 0.7398, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0037652423709558e-05, |
|
"loss": 0.8089, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 2.0004462920761542e-05, |
|
"loss": 0.7142, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.997128258593519e-05, |
|
"loss": 0.8548, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 1.9938111480125157e-05, |
|
"loss": 0.7748, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9904949664209106e-05, |
|
"loss": 0.7832, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.987179719904767e-05, |
|
"loss": 0.739, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.983865414548433e-05, |
|
"loss": 0.7557, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9805520564345293e-05, |
|
"loss": 0.7588, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.977239651643938e-05, |
|
"loss": 0.8288, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9739282062557886e-05, |
|
"loss": 0.7496, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9706177263474563e-05, |
|
"loss": 0.6867, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9673082179945384e-05, |
|
"loss": 0.7227, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9639996872708512e-05, |
|
"loss": 0.6423, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9606921402484176e-05, |
|
"loss": 0.777, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9573855829974547e-05, |
|
"loss": 0.7567, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9540800215863624e-05, |
|
"loss": 0.7351, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9507754620817133e-05, |
|
"loss": 0.59, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9474719105482416e-05, |
|
"loss": 0.7092, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.944169373048832e-05, |
|
"loss": 0.6918, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9408678556445077e-05, |
|
"loss": 0.8209, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.937567364394417e-05, |
|
"loss": 0.8695, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.934267905355832e-05, |
|
"loss": 0.6717, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9309694845841225e-05, |
|
"loss": 0.7827, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9276721081327576e-05, |
|
"loss": 0.8805, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9243757820532887e-05, |
|
"loss": 0.8435, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.9210805123953395e-05, |
|
"loss": 0.8245, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 1.917786305206594e-05, |
|
"loss": 0.9219, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9144931665327868e-05, |
|
"loss": 0.8257, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9112011024176922e-05, |
|
"loss": 0.9376, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9079101189031124e-05, |
|
"loss": 0.5985, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9046202220288658e-05, |
|
"loss": 0.7893, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.9013314178327745e-05, |
|
"loss": 0.7461, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8980437123506606e-05, |
|
"loss": 0.8159, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.894757111616325e-05, |
|
"loss": 0.7261, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8914716216615416e-05, |
|
"loss": 0.715, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8881872485160485e-05, |
|
"loss": 0.8674, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.884903998207532e-05, |
|
"loss": 0.6603, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8816218767616185e-05, |
|
"loss": 0.6988, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8783408902018605e-05, |
|
"loss": 0.7279, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8750610445497314e-05, |
|
"loss": 0.7453, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.871782345824608e-05, |
|
"loss": 0.9567, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.868504800043763e-05, |
|
"loss": 0.6403, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8652284132223534e-05, |
|
"loss": 0.8241, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8619531913734097e-05, |
|
"loss": 0.8506, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8586791405078218e-05, |
|
"loss": 0.7239, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8554062666343332e-05, |
|
"loss": 0.729, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8521345757595265e-05, |
|
"loss": 0.8423, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8488640738878136e-05, |
|
"loss": 0.7938, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8455947670214226e-05, |
|
"loss": 0.6189, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 1.8423266611603918e-05, |
|
"loss": 0.6484, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.839059762302551e-05, |
|
"loss": 0.7402, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8357940764435182e-05, |
|
"loss": 0.7406, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.832529609576683e-05, |
|
"loss": 0.7281, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8292663676932005e-05, |
|
"loss": 0.7911, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8260043567819757e-05, |
|
"loss": 0.6116, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8227435828296524e-05, |
|
"loss": 0.688, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8194840518206104e-05, |
|
"loss": 0.8594, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.816225769736942e-05, |
|
"loss": 0.7366, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8129687425584513e-05, |
|
"loss": 0.6557, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8097129762626364e-05, |
|
"loss": 0.8365, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.806458476824685e-05, |
|
"loss": 0.7672, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.8032052502174564e-05, |
|
"loss": 0.7921, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7999533024114757e-05, |
|
"loss": 0.7398, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.79670263937492e-05, |
|
"loss": 0.7764, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.79345326707361e-05, |
|
"loss": 0.8442, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.790205191470997e-05, |
|
"loss": 0.7946, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7869584185281503e-05, |
|
"loss": 0.7367, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7837129542037533e-05, |
|
"loss": 0.7633, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.780468804454083e-05, |
|
"loss": 0.7926, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7772259752330056e-05, |
|
"loss": 0.8245, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7739844724919636e-05, |
|
"loss": 0.8456, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7707443021799664e-05, |
|
"loss": 0.9084, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 1.7675054702435774e-05, |
|
"loss": 0.7188, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.764267982626901e-05, |
|
"loss": 0.775, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7610318452715782e-05, |
|
"loss": 0.7102, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7577970641167706e-05, |
|
"loss": 0.7563, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7545636450991507e-05, |
|
"loss": 0.7591, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.751331594152889e-05, |
|
"loss": 0.841, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.74810091720965e-05, |
|
"loss": 0.8448, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7448716201985728e-05, |
|
"loss": 0.6866, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7416437090462644e-05, |
|
"loss": 0.7427, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7384171896767896e-05, |
|
"loss": 0.7489, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.735192068011658e-05, |
|
"loss": 0.5422, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7319683499698152e-05, |
|
"loss": 0.8508, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7287460414676283e-05, |
|
"loss": 0.8014, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.72552514841888e-05, |
|
"loss": 0.7901, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7223056767347548e-05, |
|
"loss": 0.6313, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.719087632323827e-05, |
|
"loss": 0.7272, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.715871021092054e-05, |
|
"loss": 0.7253, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7126558489427622e-05, |
|
"loss": 0.6508, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.7094421217766344e-05, |
|
"loss": 0.8422, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.706229845491704e-05, |
|
"loss": 0.801, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.703019025983342e-05, |
|
"loss": 0.7436, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"eval_loss": 0.7689833641052246, |
|
"eval_runtime": 10.1586, |
|
"eval_samples_per_second": 3.642, |
|
"eval_steps_per_second": 0.492, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.699809669144244e-05, |
|
"loss": 0.8388, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.6966017808644225e-05, |
|
"loss": 0.7115, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 1.693395367031195e-05, |
|
"loss": 0.8026, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6901904335291717e-05, |
|
"loss": 0.6485, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6869869862402466e-05, |
|
"loss": 0.6811, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.683785031043586e-05, |
|
"loss": 0.6476, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6805845738156194e-05, |
|
"loss": 0.7066, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.677385620430026e-05, |
|
"loss": 0.8859, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6741881767577226e-05, |
|
"loss": 0.7926, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6709922486668588e-05, |
|
"loss": 0.7507, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.667797842022802e-05, |
|
"loss": 0.8807, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6646049626881265e-05, |
|
"loss": 0.7969, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.661413616522603e-05, |
|
"loss": 0.7937, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6582238093831903e-05, |
|
"loss": 0.845, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6550355471240224e-05, |
|
"loss": 0.6704, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6518488355963958e-05, |
|
"loss": 0.7718, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6486636806487626e-05, |
|
"loss": 0.6275, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.645480088126719e-05, |
|
"loss": 0.8281, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.642298063872993e-05, |
|
"loss": 0.8532, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6391176137274324e-05, |
|
"loss": 0.6332, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6359387435270007e-05, |
|
"loss": 0.7622, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6327614591057576e-05, |
|
"loss": 0.7457, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6295857662948534e-05, |
|
"loss": 0.7234, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6264116709225186e-05, |
|
"loss": 0.7721, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.623239178814051e-05, |
|
"loss": 0.7289, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6200682957918074e-05, |
|
"loss": 0.7806, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 1.6168990276751884e-05, |
|
"loss": 0.6087, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6137313802806343e-05, |
|
"loss": 0.6868, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.610565359421609e-05, |
|
"loss": 0.771, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.6074009709085924e-05, |
|
"loss": 0.7682, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.604238220549066e-05, |
|
"loss": 0.6348, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.601077114147509e-05, |
|
"loss": 0.794, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.59791765750538e-05, |
|
"loss": 0.8844, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5947598564211114e-05, |
|
"loss": 0.8491, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5916037166900977e-05, |
|
"loss": 0.8331, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5884492441046832e-05, |
|
"loss": 0.6503, |
|
"step": 1436 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5852964444541533e-05, |
|
"loss": 0.7658, |
|
"step": 1437 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5821453235247215e-05, |
|
"loss": 0.8397, |
|
"step": 1438 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5789958870995236e-05, |
|
"loss": 0.8869, |
|
"step": 1439 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.575848140958601e-05, |
|
"loss": 0.7899, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.572702090878895e-05, |
|
"loss": 0.7458, |
|
"step": 1441 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5695577426342307e-05, |
|
"loss": 0.6571, |
|
"step": 1442 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5664151019953156e-05, |
|
"loss": 0.8676, |
|
"step": 1443 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5632741747297182e-05, |
|
"loss": 0.8095, |
|
"step": 1444 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.560134966601865e-05, |
|
"loss": 0.8415, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5569974833730265e-05, |
|
"loss": 0.7099, |
|
"step": 1446 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5538617308013083e-05, |
|
"loss": 0.6655, |
|
"step": 1447 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.5507277146416398e-05, |
|
"loss": 0.8824, |
|
"step": 1448 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.547595440645761e-05, |
|
"loss": 0.8194, |
|
"step": 1449 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 1.54446491456222e-05, |
|
"loss": 0.6864, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5413361421363516e-05, |
|
"loss": 0.7061, |
|
"step": 1451 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5382091291102745e-05, |
|
"loss": 0.7927, |
|
"step": 1452 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5350838812228797e-05, |
|
"loss": 0.7616, |
|
"step": 1453 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5319604042098167e-05, |
|
"loss": 0.7953, |
|
"step": 1454 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5288387038034848e-05, |
|
"loss": 0.7014, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5257187857330235e-05, |
|
"loss": 0.6782, |
|
"step": 1456 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5226006557243024e-05, |
|
"loss": 0.761, |
|
"step": 1457 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5194843194999082e-05, |
|
"loss": 0.9225, |
|
"step": 1458 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.516369782779135e-05, |
|
"loss": 0.8315, |
|
"step": 1459 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5132570512779764e-05, |
|
"loss": 0.7894, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5101461307091113e-05, |
|
"loss": 0.65, |
|
"step": 1461 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.507037026781895e-05, |
|
"loss": 0.7349, |
|
"step": 1462 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5039297452023485e-05, |
|
"loss": 0.7232, |
|
"step": 1463 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.5008242916731505e-05, |
|
"loss": 0.7363, |
|
"step": 1464 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4977206718936232e-05, |
|
"loss": 0.8518, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4946188915597215e-05, |
|
"loss": 0.7308, |
|
"step": 1466 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4915189563640291e-05, |
|
"loss": 0.7339, |
|
"step": 1467 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4884208719957396e-05, |
|
"loss": 0.7194, |
|
"step": 1468 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4853246441406504e-05, |
|
"loss": 0.7901, |
|
"step": 1469 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4822302784811524e-05, |
|
"loss": 0.7507, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4791377806962193e-05, |
|
"loss": 0.7285, |
|
"step": 1471 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4760471564613964e-05, |
|
"loss": 0.882, |
|
"step": 1472 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 1.4729584114487895e-05, |
|
"loss": 0.688, |
|
"step": 1473 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4698715513270565e-05, |
|
"loss": 0.7817, |
|
"step": 1474 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4667865817613963e-05, |
|
"loss": 0.7811, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4637035084135381e-05, |
|
"loss": 0.7831, |
|
"step": 1476 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4606223369417288e-05, |
|
"loss": 0.776, |
|
"step": 1477 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4575430730007288e-05, |
|
"loss": 0.8137, |
|
"step": 1478 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4544657222417954e-05, |
|
"loss": 0.7777, |
|
"step": 1479 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4513902903126753e-05, |
|
"loss": 0.8117, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4483167828575903e-05, |
|
"loss": 0.8106, |
|
"step": 1481 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4452452055172366e-05, |
|
"loss": 0.7196, |
|
"step": 1482 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4421755639287632e-05, |
|
"loss": 0.8217, |
|
"step": 1483 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4391078637257687e-05, |
|
"loss": 0.4995, |
|
"step": 1484 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4360421105382882e-05, |
|
"loss": 0.6736, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4329783099927826e-05, |
|
"loss": 0.7658, |
|
"step": 1486 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4299164677121308e-05, |
|
"loss": 0.8036, |
|
"step": 1487 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4268565893156167e-05, |
|
"loss": 0.6824, |
|
"step": 1488 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4237986804189202e-05, |
|
"loss": 0.63, |
|
"step": 1489 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4207427466341073e-05, |
|
"loss": 0.8837, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4176887935696164e-05, |
|
"loss": 0.6741, |
|
"step": 1491 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4146368268302573e-05, |
|
"loss": 0.8185, |
|
"step": 1492 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.411586852017186e-05, |
|
"loss": 0.723, |
|
"step": 1493 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.408538874727908e-05, |
|
"loss": 0.7325, |
|
"step": 1494 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.4054929005562605e-05, |
|
"loss": 0.6877, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 1.402448935092408e-05, |
|
"loss": 0.5648, |
|
"step": 1496 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3994069839228261e-05, |
|
"loss": 0.7489, |
|
"step": 1497 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.39636705263029e-05, |
|
"loss": 0.7981, |
|
"step": 1498 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3933291467938747e-05, |
|
"loss": 0.8061, |
|
"step": 1499 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3902932719889339e-05, |
|
"loss": 0.6837, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"eval_loss": 0.7673142552375793, |
|
"eval_runtime": 10.1791, |
|
"eval_samples_per_second": 3.635, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3872594337870945e-05, |
|
"loss": 0.7132, |
|
"step": 1501 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.384227637756246e-05, |
|
"loss": 0.8449, |
|
"step": 1502 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3811978894605298e-05, |
|
"loss": 0.7883, |
|
"step": 1503 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3781701944603292e-05, |
|
"loss": 0.7814, |
|
"step": 1504 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3751445583122594e-05, |
|
"loss": 0.7865, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3721209865691565e-05, |
|
"loss": 0.7895, |
|
"step": 1506 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3690994847800681e-05, |
|
"loss": 0.8147, |
|
"step": 1507 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3660800584902437e-05, |
|
"loss": 0.833, |
|
"step": 1508 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3630627132411214e-05, |
|
"loss": 0.5843, |
|
"step": 1509 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3600474545703248e-05, |
|
"loss": 0.6556, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.357034288011642e-05, |
|
"loss": 0.5857, |
|
"step": 1511 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3540232190950258e-05, |
|
"loss": 0.7229, |
|
"step": 1512 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3510142533465764e-05, |
|
"loss": 0.7408, |
|
"step": 1513 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3480073962885382e-05, |
|
"loss": 0.7245, |
|
"step": 1514 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3450026534392823e-05, |
|
"loss": 0.7624, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3420000303132985e-05, |
|
"loss": 0.7316, |
|
"step": 1516 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3389995324211907e-05, |
|
"loss": 0.8585, |
|
"step": 1517 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3360011652696588e-05, |
|
"loss": 0.7258, |
|
"step": 1518 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 1.3330049343614951e-05, |
|
"loss": 0.7685, |
|
"step": 1519 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3300108451955663e-05, |
|
"loss": 0.7551, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.327018903266815e-05, |
|
"loss": 0.7523, |
|
"step": 1521 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3240291140662392e-05, |
|
"loss": 0.6846, |
|
"step": 1522 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.321041483080886e-05, |
|
"loss": 0.7882, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3180560157938432e-05, |
|
"loss": 0.8501, |
|
"step": 1524 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3150727176842265e-05, |
|
"loss": 0.7051, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3120915942271708e-05, |
|
"loss": 0.6396, |
|
"step": 1526 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3091126508938203e-05, |
|
"loss": 0.6782, |
|
"step": 1527 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.306135893151318e-05, |
|
"loss": 0.7321, |
|
"step": 1528 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.3031613264627951e-05, |
|
"loss": 0.7165, |
|
"step": 1529 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.300188956287362e-05, |
|
"loss": 0.7861, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2972187880800993e-05, |
|
"loss": 0.7434, |
|
"step": 1531 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2942508272920457e-05, |
|
"loss": 0.7279, |
|
"step": 1532 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2912850793701858e-05, |
|
"loss": 0.8702, |
|
"step": 1533 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2883215497574458e-05, |
|
"loss": 0.7523, |
|
"step": 1534 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2853602438926826e-05, |
|
"loss": 0.8204, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2824011672106679e-05, |
|
"loss": 0.7554, |
|
"step": 1536 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2794443251420846e-05, |
|
"loss": 0.8102, |
|
"step": 1537 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2764897231135143e-05, |
|
"loss": 0.672, |
|
"step": 1538 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2735373665474262e-05, |
|
"loss": 0.6154, |
|
"step": 1539 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2705872608621706e-05, |
|
"loss": 0.7744, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2676394114719647e-05, |
|
"loss": 0.6868, |
|
"step": 1541 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.264693823786886e-05, |
|
"loss": 0.7004, |
|
"step": 1542 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 1.2617505032128618e-05, |
|
"loss": 0.7484, |
|
"step": 1543 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2588094551516575e-05, |
|
"loss": 0.9453, |
|
"step": 1544 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2558706850008678e-05, |
|
"loss": 0.6682, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2529341981539071e-05, |
|
"loss": 0.7015, |
|
"step": 1546 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2500000000000006e-05, |
|
"loss": 0.7675, |
|
"step": 1547 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2470680959241702e-05, |
|
"loss": 0.6856, |
|
"step": 1548 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.244138491307231e-05, |
|
"loss": 0.7482, |
|
"step": 1549 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2412111915257771e-05, |
|
"loss": 0.7449, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.23828620195217e-05, |
|
"loss": 0.8686, |
|
"step": 1551 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2353635279545325e-05, |
|
"loss": 0.8173, |
|
"step": 1552 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2324431748967413e-05, |
|
"loss": 0.8125, |
|
"step": 1553 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2295251481384101e-05, |
|
"loss": 0.788, |
|
"step": 1554 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2266094530348812e-05, |
|
"loss": 0.7156, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2236960949372234e-05, |
|
"loss": 0.6775, |
|
"step": 1556 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2207850791922123e-05, |
|
"loss": 0.9304, |
|
"step": 1557 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2178764111423258e-05, |
|
"loss": 0.8204, |
|
"step": 1558 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2149700961257334e-05, |
|
"loss": 0.7543, |
|
"step": 1559 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2120661394762858e-05, |
|
"loss": 0.787, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2091645465235058e-05, |
|
"loss": 0.7496, |
|
"step": 1561 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2062653225925776e-05, |
|
"loss": 0.7551, |
|
"step": 1562 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2033684730043385e-05, |
|
"loss": 0.7674, |
|
"step": 1563 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.2004740030752676e-05, |
|
"loss": 0.7669, |
|
"step": 1564 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.1975819181174769e-05, |
|
"loss": 0.7482, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.1946922234387006e-05, |
|
"loss": 0.7732, |
|
"step": 1566 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1918049243422894e-05, |
|
"loss": 0.7507, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1889200261271923e-05, |
|
"loss": 0.7801, |
|
"step": 1568 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1860375340879549e-05, |
|
"loss": 0.7344, |
|
"step": 1569 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.183157453514708e-05, |
|
"loss": 0.7422, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1802797896931548e-05, |
|
"loss": 0.8728, |
|
"step": 1571 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1774045479045646e-05, |
|
"loss": 0.7828, |
|
"step": 1572 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1745317334257577e-05, |
|
"loss": 0.6144, |
|
"step": 1573 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1716613515291058e-05, |
|
"loss": 0.7564, |
|
"step": 1574 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1687934074825122e-05, |
|
"loss": 0.6821, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1659279065494064e-05, |
|
"loss": 0.8643, |
|
"step": 1576 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1630648539887352e-05, |
|
"loss": 0.6761, |
|
"step": 1577 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1602042550549508e-05, |
|
"loss": 0.847, |
|
"step": 1578 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1573461149980038e-05, |
|
"loss": 0.6638, |
|
"step": 1579 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1544904390633312e-05, |
|
"loss": 0.8545, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1516372324918478e-05, |
|
"loss": 0.7427, |
|
"step": 1581 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1487865005199364e-05, |
|
"loss": 0.8448, |
|
"step": 1582 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1459382483794382e-05, |
|
"loss": 0.8973, |
|
"step": 1583 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1430924812976429e-05, |
|
"loss": 0.6742, |
|
"step": 1584 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1402492044972826e-05, |
|
"loss": 0.6891, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1374084231965137e-05, |
|
"loss": 0.7441, |
|
"step": 1586 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1345701426089157e-05, |
|
"loss": 0.8483, |
|
"step": 1587 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1317343679434797e-05, |
|
"loss": 0.7646, |
|
"step": 1588 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.1289011044045972e-05, |
|
"loss": 0.8738, |
|
"step": 1589 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.126070357192048e-05, |
|
"loss": 0.7981, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1232421315009966e-05, |
|
"loss": 0.7359, |
|
"step": 1591 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.120416432521981e-05, |
|
"loss": 0.7633, |
|
"step": 1592 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1175932654408997e-05, |
|
"loss": 0.6446, |
|
"step": 1593 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.114772635439005e-05, |
|
"loss": 0.6809, |
|
"step": 1594 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1119545476928939e-05, |
|
"loss": 0.7938, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1091390073744965e-05, |
|
"loss": 0.7758, |
|
"step": 1596 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1063260196510683e-05, |
|
"loss": 0.6296, |
|
"step": 1597 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1035155896851804e-05, |
|
"loss": 0.8407, |
|
"step": 1598 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.1007077226347087e-05, |
|
"loss": 0.756, |
|
"step": 1599 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.097902423652827e-05, |
|
"loss": 0.7576, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"eval_loss": 0.7660685777664185, |
|
"eval_runtime": 10.1738, |
|
"eval_samples_per_second": 3.637, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.095099697887994e-05, |
|
"loss": 0.857, |
|
"step": 1601 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.09229955048395e-05, |
|
"loss": 0.6867, |
|
"step": 1602 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0895019865796974e-05, |
|
"loss": 0.8768, |
|
"step": 1603 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.086707011309501e-05, |
|
"loss": 0.7735, |
|
"step": 1604 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0839146298028736e-05, |
|
"loss": 0.7836, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0811248471845694e-05, |
|
"loss": 0.9065, |
|
"step": 1606 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0783376685745723e-05, |
|
"loss": 0.6707, |
|
"step": 1607 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0755530990880834e-05, |
|
"loss": 0.6427, |
|
"step": 1608 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0727711438355213e-05, |
|
"loss": 0.8193, |
|
"step": 1609 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0699918079225033e-05, |
|
"loss": 0.8126, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0672150964498403e-05, |
|
"loss": 0.8801, |
|
"step": 1611 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.0644410145135267e-05, |
|
"loss": 0.7672, |
|
"step": 1612 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0616695672047305e-05, |
|
"loss": 0.681, |
|
"step": 1613 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0589007596097855e-05, |
|
"loss": 0.6755, |
|
"step": 1614 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0561345968101799e-05, |
|
"loss": 0.7523, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0533710838825489e-05, |
|
"loss": 0.624, |
|
"step": 1616 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0506102258986634e-05, |
|
"loss": 0.7993, |
|
"step": 1617 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0478520279254231e-05, |
|
"loss": 0.8003, |
|
"step": 1618 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0450964950248437e-05, |
|
"loss": 0.69, |
|
"step": 1619 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0423436322540545e-05, |
|
"loss": 0.7027, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.039593444665278e-05, |
|
"loss": 0.8992, |
|
"step": 1621 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0368459373058318e-05, |
|
"loss": 0.7951, |
|
"step": 1622 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0341011152181112e-05, |
|
"loss": 0.6282, |
|
"step": 1623 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.031358983439589e-05, |
|
"loss": 0.9396, |
|
"step": 1624 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0286195470027928e-05, |
|
"loss": 0.7106, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0258828109353086e-05, |
|
"loss": 0.8467, |
|
"step": 1626 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0231487802597665e-05, |
|
"loss": 0.7889, |
|
"step": 1627 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0204174599938293e-05, |
|
"loss": 0.931, |
|
"step": 1628 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0176888551501881e-05, |
|
"loss": 0.7254, |
|
"step": 1629 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0149629707365457e-05, |
|
"loss": 0.7016, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0122398117556184e-05, |
|
"loss": 0.7054, |
|
"step": 1631 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0095193832051168e-05, |
|
"loss": 0.6572, |
|
"step": 1632 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0068016900777411e-05, |
|
"loss": 0.8565, |
|
"step": 1633 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0040867373611715e-05, |
|
"loss": 0.7847, |
|
"step": 1634 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.0013745300380589e-05, |
|
"loss": 0.7425, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.986650730860156e-06, |
|
"loss": 0.8952, |
|
"step": 1636 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.959583714776063e-06, |
|
"loss": 0.835, |
|
"step": 1637 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.932544301803388e-06, |
|
"loss": 0.8751, |
|
"step": 1638 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.905532541566549e-06, |
|
"loss": 0.6323, |
|
"step": 1639 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.878548483639204e-06, |
|
"loss": 0.7731, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.851592177544208e-06, |
|
"loss": 0.7715, |
|
"step": 1641 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.824663672753453e-06, |
|
"loss": 0.6615, |
|
"step": 1642 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.797763018687795e-06, |
|
"loss": 0.8719, |
|
"step": 1643 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.770890264716992e-06, |
|
"loss": 0.7178, |
|
"step": 1644 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.744045460159624e-06, |
|
"loss": 0.7073, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.717228654282942e-06, |
|
"loss": 0.7443, |
|
"step": 1646 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.690439896302827e-06, |
|
"loss": 0.7285, |
|
"step": 1647 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.663679235383662e-06, |
|
"loss": 0.731, |
|
"step": 1648 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.636946720638306e-06, |
|
"loss": 0.8314, |
|
"step": 1649 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.610242401127934e-06, |
|
"loss": 0.7368, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.583566325861975e-06, |
|
"loss": 0.7324, |
|
"step": 1651 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.556918543798035e-06, |
|
"loss": 0.7315, |
|
"step": 1652 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.530299103841785e-06, |
|
"loss": 0.736, |
|
"step": 1653 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.503708054846885e-06, |
|
"loss": 0.8074, |
|
"step": 1654 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.477145445614888e-06, |
|
"loss": 0.701, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.450611324895156e-06, |
|
"loss": 0.7082, |
|
"step": 1656 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.42410574138476e-06, |
|
"loss": 0.6774, |
|
"step": 1657 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.397628743728399e-06, |
|
"loss": 0.7554, |
|
"step": 1658 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 9.371180380518333e-06, |
|
"loss": 0.7239, |
|
"step": 1659 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.344760700294231e-06, |
|
"loss": 0.8155, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.318369751543143e-06, |
|
"loss": 0.6981, |
|
"step": 1661 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.292007582699383e-06, |
|
"loss": 0.7622, |
|
"step": 1662 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.265674242144462e-06, |
|
"loss": 0.6517, |
|
"step": 1663 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.239369778206977e-06, |
|
"loss": 0.6636, |
|
"step": 1664 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.213094239162496e-06, |
|
"loss": 0.6402, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.18684767323355e-06, |
|
"loss": 0.8188, |
|
"step": 1666 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.160630128589473e-06, |
|
"loss": 0.6963, |
|
"step": 1667 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.134441653346337e-06, |
|
"loss": 0.8683, |
|
"step": 1668 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.108282295566873e-06, |
|
"loss": 0.7867, |
|
"step": 1669 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.082152103260363e-06, |
|
"loss": 0.8273, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.056051124382572e-06, |
|
"loss": 0.8001, |
|
"step": 1671 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.029979406835643e-06, |
|
"loss": 0.8615, |
|
"step": 1672 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.00393699846802e-06, |
|
"loss": 0.7993, |
|
"step": 1673 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.977923947074366e-06, |
|
"loss": 0.6579, |
|
"step": 1674 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.951940300395445e-06, |
|
"loss": 0.8094, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.92598610611807e-06, |
|
"loss": 0.7659, |
|
"step": 1676 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.900061411875022e-06, |
|
"loss": 0.6938, |
|
"step": 1677 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.87416626524489e-06, |
|
"loss": 0.8218, |
|
"step": 1678 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.84830071375207e-06, |
|
"loss": 0.7098, |
|
"step": 1679 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.822464804866652e-06, |
|
"loss": 0.8403, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.7966585860043e-06, |
|
"loss": 0.7814, |
|
"step": 1681 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 8.770882104526207e-06, |
|
"loss": 0.7178, |
|
"step": 1682 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.745135407738961e-06, |
|
"loss": 0.7318, |
|
"step": 1683 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.71941854289453e-06, |
|
"loss": 0.6442, |
|
"step": 1684 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.693731557190105e-06, |
|
"loss": 0.6537, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.66807449776805e-06, |
|
"loss": 0.754, |
|
"step": 1686 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.642447411715784e-06, |
|
"loss": 0.7928, |
|
"step": 1687 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.616850346065755e-06, |
|
"loss": 0.6301, |
|
"step": 1688 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.591283347795293e-06, |
|
"loss": 0.7883, |
|
"step": 1689 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.565746463826549e-06, |
|
"loss": 0.7844, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.540239741026405e-06, |
|
"loss": 0.651, |
|
"step": 1691 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.514763226206395e-06, |
|
"loss": 0.6677, |
|
"step": 1692 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.489316966122609e-06, |
|
"loss": 0.802, |
|
"step": 1693 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.463901007475614e-06, |
|
"loss": 0.8137, |
|
"step": 1694 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.438515396910362e-06, |
|
"loss": 0.7098, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.413160181016116e-06, |
|
"loss": 0.728, |
|
"step": 1696 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.387835406326338e-06, |
|
"loss": 0.5884, |
|
"step": 1697 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.362541119318659e-06, |
|
"loss": 0.8875, |
|
"step": 1698 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.337277366414728e-06, |
|
"loss": 0.772, |
|
"step": 1699 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.31204419398015e-06, |
|
"loss": 0.938, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"eval_loss": 0.7655941247940063, |
|
"eval_runtime": 10.1757, |
|
"eval_samples_per_second": 3.636, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.28684164832442e-06, |
|
"loss": 0.7552, |
|
"step": 1701 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.261669775700836e-06, |
|
"loss": 0.6922, |
|
"step": 1702 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.236528622306388e-06, |
|
"loss": 0.7711, |
|
"step": 1703 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.211418234281686e-06, |
|
"loss": 0.7662, |
|
"step": 1704 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 8.186338657710884e-06, |
|
"loss": 0.8212, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.16128993862159e-06, |
|
"loss": 0.7022, |
|
"step": 1706 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.136272122984778e-06, |
|
"loss": 0.7451, |
|
"step": 1707 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.1112852567147e-06, |
|
"loss": 0.7028, |
|
"step": 1708 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.086329385668822e-06, |
|
"loss": 0.6798, |
|
"step": 1709 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.061404555647711e-06, |
|
"loss": 0.8119, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.036510812394967e-06, |
|
"loss": 0.9166, |
|
"step": 1711 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 8.011648201597166e-06, |
|
"loss": 0.5711, |
|
"step": 1712 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.986816768883699e-06, |
|
"loss": 0.6761, |
|
"step": 1713 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.962016559826772e-06, |
|
"loss": 0.723, |
|
"step": 1714 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.937247619941265e-06, |
|
"loss": 0.7273, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.912509994684709e-06, |
|
"loss": 0.805, |
|
"step": 1716 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.88780372945713e-06, |
|
"loss": 0.8572, |
|
"step": 1717 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.863128869600998e-06, |
|
"loss": 0.8044, |
|
"step": 1718 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.838485460401154e-06, |
|
"loss": 0.7109, |
|
"step": 1719 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.813873547084741e-06, |
|
"loss": 0.7738, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.789293174821077e-06, |
|
"loss": 0.6835, |
|
"step": 1721 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.764744388721576e-06, |
|
"loss": 0.8413, |
|
"step": 1722 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.740227233839725e-06, |
|
"loss": 0.8831, |
|
"step": 1723 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.71574175517093e-06, |
|
"loss": 0.7507, |
|
"step": 1724 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.69128799765247e-06, |
|
"loss": 0.8442, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.66686600616341e-06, |
|
"loss": 0.8111, |
|
"step": 1726 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.642475825524514e-06, |
|
"loss": 0.9816, |
|
"step": 1727 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 7.618117500498165e-06, |
|
"loss": 0.7127, |
|
"step": 1728 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.593791075788281e-06, |
|
"loss": 0.7663, |
|
"step": 1729 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.5694965960402375e-06, |
|
"loss": 0.6685, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.5452341058407816e-06, |
|
"loss": 0.8091, |
|
"step": 1731 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.521003649717947e-06, |
|
"loss": 0.6338, |
|
"step": 1732 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.496805272140981e-06, |
|
"loss": 0.8271, |
|
"step": 1733 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.472639017520272e-06, |
|
"loss": 0.856, |
|
"step": 1734 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.4485049302072205e-06, |
|
"loss": 0.7441, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.424403054494211e-06, |
|
"loss": 0.6767, |
|
"step": 1736 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.400333434614528e-06, |
|
"loss": 0.6676, |
|
"step": 1737 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.376296114742231e-06, |
|
"loss": 0.8644, |
|
"step": 1738 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.3522911389921176e-06, |
|
"loss": 0.7289, |
|
"step": 1739 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.3283185514196e-06, |
|
"loss": 0.6892, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.304378396020689e-06, |
|
"loss": 0.7766, |
|
"step": 1741 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.280470716731843e-06, |
|
"loss": 0.6815, |
|
"step": 1742 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.256595557429935e-06, |
|
"loss": 0.7115, |
|
"step": 1743 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.232752961932141e-06, |
|
"loss": 0.7781, |
|
"step": 1744 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.208942973995886e-06, |
|
"loss": 0.7154, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.185165637318747e-06, |
|
"loss": 0.8243, |
|
"step": 1746 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.161420995538373e-06, |
|
"loss": 0.7906, |
|
"step": 1747 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.13770909223242e-06, |
|
"loss": 0.6554, |
|
"step": 1748 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.114029970918451e-06, |
|
"loss": 0.7639, |
|
"step": 1749 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.090383675053866e-06, |
|
"loss": 0.6918, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 7.066770248035823e-06, |
|
"loss": 0.8866, |
|
"step": 1751 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.043189733201175e-06, |
|
"loss": 0.6485, |
|
"step": 1752 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 7.019642173826335e-06, |
|
"loss": 0.7619, |
|
"step": 1753 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.9961276131272565e-06, |
|
"loss": 0.8776, |
|
"step": 1754 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.972646094259342e-06, |
|
"loss": 0.7731, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.949197660317344e-06, |
|
"loss": 0.6611, |
|
"step": 1756 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.925782354335278e-06, |
|
"loss": 0.8898, |
|
"step": 1757 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.902400219286373e-06, |
|
"loss": 0.8171, |
|
"step": 1758 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.879051298083003e-06, |
|
"loss": 0.8203, |
|
"step": 1759 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.855735633576557e-06, |
|
"loss": 0.7698, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.832453268557398e-06, |
|
"loss": 0.8323, |
|
"step": 1761 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.8092042457547775e-06, |
|
"loss": 0.7024, |
|
"step": 1762 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.785988607836755e-06, |
|
"loss": 0.7692, |
|
"step": 1763 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.762806397410118e-06, |
|
"loss": 0.6685, |
|
"step": 1764 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.739657657020309e-06, |
|
"loss": 0.8867, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.716542429151343e-06, |
|
"loss": 0.795, |
|
"step": 1766 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.693460756225725e-06, |
|
"loss": 0.6574, |
|
"step": 1767 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.670412680604379e-06, |
|
"loss": 0.7285, |
|
"step": 1768 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.647398244586595e-06, |
|
"loss": 0.7736, |
|
"step": 1769 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.624417490409882e-06, |
|
"loss": 0.7106, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.601470460249959e-06, |
|
"loss": 0.8025, |
|
"step": 1771 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.578557196220647e-06, |
|
"loss": 0.8226, |
|
"step": 1772 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.555677740373812e-06, |
|
"loss": 0.8037, |
|
"step": 1773 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.53283213469926e-06, |
|
"loss": 0.8083, |
|
"step": 1774 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 6.510020421124649e-06, |
|
"loss": 0.8245, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.487242641515489e-06, |
|
"loss": 0.8152, |
|
"step": 1776 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.4644988376749705e-06, |
|
"loss": 0.9058, |
|
"step": 1777 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.441789051343955e-06, |
|
"loss": 0.7909, |
|
"step": 1778 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.419113324200835e-06, |
|
"loss": 0.7437, |
|
"step": 1779 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.3964716978615416e-06, |
|
"loss": 0.7873, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.373864213879402e-06, |
|
"loss": 0.7341, |
|
"step": 1781 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.351290913745076e-06, |
|
"loss": 0.8479, |
|
"step": 1782 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.328751838886496e-06, |
|
"loss": 0.7774, |
|
"step": 1783 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.306247030668783e-06, |
|
"loss": 0.7135, |
|
"step": 1784 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.283776530394162e-06, |
|
"loss": 0.6989, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.261340379301894e-06, |
|
"loss": 0.7747, |
|
"step": 1786 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.238938618568227e-06, |
|
"loss": 0.7521, |
|
"step": 1787 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.2165712893062476e-06, |
|
"loss": 0.7549, |
|
"step": 1788 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.194238432565885e-06, |
|
"loss": 0.8281, |
|
"step": 1789 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.171940089333786e-06, |
|
"loss": 0.7668, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.14967630053328e-06, |
|
"loss": 0.7116, |
|
"step": 1791 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.127447107024248e-06, |
|
"loss": 0.5955, |
|
"step": 1792 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.105252549603088e-06, |
|
"loss": 0.7986, |
|
"step": 1793 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.083092669002658e-06, |
|
"loss": 0.6424, |
|
"step": 1794 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.060967505892143e-06, |
|
"loss": 0.6974, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.038877100877036e-06, |
|
"loss": 0.7831, |
|
"step": 1796 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 6.016821494499003e-06, |
|
"loss": 0.8403, |
|
"step": 1797 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 5.994800727235894e-06, |
|
"loss": 0.6605, |
|
"step": 1798 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.972814839501589e-06, |
|
"loss": 0.8305, |
|
"step": 1799 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.950863871645962e-06, |
|
"loss": 0.8028, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"eval_loss": 0.7649816274642944, |
|
"eval_runtime": 10.1798, |
|
"eval_samples_per_second": 3.635, |
|
"eval_steps_per_second": 0.491, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.928947863954798e-06, |
|
"loss": 0.8021, |
|
"step": 1801 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.907066856649729e-06, |
|
"loss": 0.687, |
|
"step": 1802 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.88522088988814e-06, |
|
"loss": 0.8061, |
|
"step": 1803 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.86341000376312e-06, |
|
"loss": 0.6787, |
|
"step": 1804 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.841634238303365e-06, |
|
"loss": 0.6511, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.81989363347312e-06, |
|
"loss": 0.846, |
|
"step": 1806 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.7981882291720944e-06, |
|
"loss": 0.6861, |
|
"step": 1807 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.77651806523542e-06, |
|
"loss": 0.5914, |
|
"step": 1808 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.754883181433529e-06, |
|
"loss": 0.7258, |
|
"step": 1809 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.7332836174721014e-06, |
|
"loss": 0.8925, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.711719412992006e-06, |
|
"loss": 0.8861, |
|
"step": 1811 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.690190607569229e-06, |
|
"loss": 0.7975, |
|
"step": 1812 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.668697240714782e-06, |
|
"loss": 0.7744, |
|
"step": 1813 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.647239351874614e-06, |
|
"loss": 0.6823, |
|
"step": 1814 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.625816980429599e-06, |
|
"loss": 0.7567, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.60443016569541e-06, |
|
"loss": 0.796, |
|
"step": 1816 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.58307894692246e-06, |
|
"loss": 0.8224, |
|
"step": 1817 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.561763363295841e-06, |
|
"loss": 0.798, |
|
"step": 1818 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.540483453935247e-06, |
|
"loss": 0.7715, |
|
"step": 1819 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.519239257894893e-06, |
|
"loss": 0.7577, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 5.498030814163454e-06, |
|
"loss": 1.0168, |
|
"step": 1821 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.476858161663994e-06, |
|
"loss": 0.8213, |
|
"step": 1822 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.455721339253886e-06, |
|
"loss": 0.8093, |
|
"step": 1823 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.434620385724745e-06, |
|
"loss": 0.8216, |
|
"step": 1824 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.413555339802356e-06, |
|
"loss": 0.7165, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.392526240146622e-06, |
|
"loss": 0.8417, |
|
"step": 1826 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.371533125351444e-06, |
|
"loss": 0.7238, |
|
"step": 1827 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.350576033944704e-06, |
|
"loss": 0.7569, |
|
"step": 1828 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.329655004388157e-06, |
|
"loss": 0.8378, |
|
"step": 1829 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.308770075077396e-06, |
|
"loss": 0.7627, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.2879212843417485e-06, |
|
"loss": 0.8093, |
|
"step": 1831 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.267108670444199e-06, |
|
"loss": 0.7012, |
|
"step": 1832 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.246332271581375e-06, |
|
"loss": 0.6478, |
|
"step": 1833 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.225592125883416e-06, |
|
"loss": 0.869, |
|
"step": 1834 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.20488827141393e-06, |
|
"loss": 0.6629, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.184220746169927e-06, |
|
"loss": 0.8603, |
|
"step": 1836 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.163589588081741e-06, |
|
"loss": 0.7278, |
|
"step": 1837 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.142994835012957e-06, |
|
"loss": 0.7023, |
|
"step": 1838 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.122436524760357e-06, |
|
"loss": 0.9379, |
|
"step": 1839 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.101914695053836e-06, |
|
"loss": 0.7581, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.081429383556333e-06, |
|
"loss": 0.7618, |
|
"step": 1841 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.060980627863776e-06, |
|
"loss": 0.7541, |
|
"step": 1842 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.040568465504991e-06, |
|
"loss": 0.8867, |
|
"step": 1843 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 5.020192933941672e-06, |
|
"loss": 0.8828, |
|
"step": 1844 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.99985407056825e-06, |
|
"loss": 0.7256, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.979551912711872e-06, |
|
"loss": 0.8781, |
|
"step": 1846 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.959286497632345e-06, |
|
"loss": 0.8466, |
|
"step": 1847 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.939057862522023e-06, |
|
"loss": 0.7232, |
|
"step": 1848 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.918866044505746e-06, |
|
"loss": 0.6573, |
|
"step": 1849 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.8987110806408e-06, |
|
"loss": 0.77, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.878593007916849e-06, |
|
"loss": 0.6516, |
|
"step": 1851 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.8585118632558265e-06, |
|
"loss": 0.7282, |
|
"step": 1852 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.838467683511913e-06, |
|
"loss": 0.8151, |
|
"step": 1853 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.81846050547142e-06, |
|
"loss": 0.6609, |
|
"step": 1854 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.798490365852787e-06, |
|
"loss": 0.802, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.778557301306455e-06, |
|
"loss": 0.7106, |
|
"step": 1856 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.758661348414833e-06, |
|
"loss": 0.7277, |
|
"step": 1857 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.738802543692214e-06, |
|
"loss": 0.7965, |
|
"step": 1858 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.718980923584715e-06, |
|
"loss": 0.8508, |
|
"step": 1859 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.699196524470215e-06, |
|
"loss": 0.8593, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.679449382658277e-06, |
|
"loss": 0.7577, |
|
"step": 1861 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.659739534390089e-06, |
|
"loss": 0.7016, |
|
"step": 1862 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.640067015838398e-06, |
|
"loss": 0.7406, |
|
"step": 1863 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.62043186310743e-06, |
|
"loss": 0.7154, |
|
"step": 1864 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.600834112232855e-06, |
|
"loss": 0.8136, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.581273799181693e-06, |
|
"loss": 0.7784, |
|
"step": 1866 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.561750959852238e-06, |
|
"loss": 0.7978, |
|
"step": 1867 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 4.542265630074022e-06, |
|
"loss": 0.7299, |
|
"step": 1868 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.5228178456077575e-06, |
|
"loss": 0.5917, |
|
"step": 1869 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.503407642145222e-06, |
|
"loss": 0.6906, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.484035055309235e-06, |
|
"loss": 0.7447, |
|
"step": 1871 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.4647001206535785e-06, |
|
"loss": 0.7769, |
|
"step": 1872 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.4454028736629324e-06, |
|
"loss": 0.7974, |
|
"step": 1873 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.426143349752807e-06, |
|
"loss": 0.7232, |
|
"step": 1874 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.406921584269486e-06, |
|
"loss": 0.7412, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.3877376124899535e-06, |
|
"loss": 0.7451, |
|
"step": 1876 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.3685914696218326e-06, |
|
"loss": 0.7912, |
|
"step": 1877 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.3494831908033166e-06, |
|
"loss": 0.8545, |
|
"step": 1878 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.330412811103126e-06, |
|
"loss": 0.6788, |
|
"step": 1879 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.311380365520401e-06, |
|
"loss": 0.7453, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.2923858889846735e-06, |
|
"loss": 0.778, |
|
"step": 1881 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.273429416355792e-06, |
|
"loss": 0.8369, |
|
"step": 1882 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.254510982423876e-06, |
|
"loss": 0.6613, |
|
"step": 1883 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.235630621909201e-06, |
|
"loss": 0.851, |
|
"step": 1884 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.216788369462176e-06, |
|
"loss": 0.9104, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.197984259663304e-06, |
|
"loss": 0.8859, |
|
"step": 1886 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.179218327023046e-06, |
|
"loss": 0.7183, |
|
"step": 1887 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.160490605981823e-06, |
|
"loss": 0.8198, |
|
"step": 1888 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.1418011309099e-06, |
|
"loss": 0.927, |
|
"step": 1889 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.12314993610739e-06, |
|
"loss": 0.7529, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 4.104537055804119e-06, |
|
"loss": 0.5568, |
|
"step": 1891 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.085962524159612e-06, |
|
"loss": 0.6771, |
|
"step": 1892 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.067426375263009e-06, |
|
"loss": 0.7252, |
|
"step": 1893 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.048928643133007e-06, |
|
"loss": 0.6822, |
|
"step": 1894 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.030469361717801e-06, |
|
"loss": 0.8225, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 4.012048564895016e-06, |
|
"loss": 0.902, |
|
"step": 1896 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.993666286471648e-06, |
|
"loss": 0.631, |
|
"step": 1897 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.975322560184006e-06, |
|
"loss": 0.7094, |
|
"step": 1898 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.957017419697637e-06, |
|
"loss": 0.8937, |
|
"step": 1899 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.938750898607277e-06, |
|
"loss": 0.7126, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"eval_loss": 0.764629065990448, |
|
"eval_runtime": 10.2072, |
|
"eval_samples_per_second": 3.625, |
|
"eval_steps_per_second": 0.49, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.920523030436801e-06, |
|
"loss": 0.7364, |
|
"step": 1901 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.902333848639117e-06, |
|
"loss": 0.6917, |
|
"step": 1902 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.884183386596143e-06, |
|
"loss": 0.8181, |
|
"step": 1903 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.866071677618757e-06, |
|
"loss": 0.5936, |
|
"step": 1904 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.847998754946691e-06, |
|
"loss": 0.8222, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.829964651748507e-06, |
|
"loss": 0.8051, |
|
"step": 1906 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.8119694011215013e-06, |
|
"loss": 0.7745, |
|
"step": 1907 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.7940130360916954e-06, |
|
"loss": 0.7419, |
|
"step": 1908 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.776095589613732e-06, |
|
"loss": 0.7021, |
|
"step": 1909 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.7582170945708234e-06, |
|
"loss": 0.6946, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.7403775837747013e-06, |
|
"loss": 0.8118, |
|
"step": 1911 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.722577089965551e-06, |
|
"loss": 0.6971, |
|
"step": 1912 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.7048156458119514e-06, |
|
"loss": 0.7887, |
|
"step": 1913 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 3.6870932839108142e-06, |
|
"loss": 0.712, |
|
"step": 1914 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6694100367873243e-06, |
|
"loss": 0.7373, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6517659368948797e-06, |
|
"loss": 0.6492, |
|
"step": 1916 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.6341610166150314e-06, |
|
"loss": 0.7008, |
|
"step": 1917 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.616595308257445e-06, |
|
"loss": 0.7586, |
|
"step": 1918 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.5990688440597887e-06, |
|
"loss": 0.6507, |
|
"step": 1919 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.5815816561877267e-06, |
|
"loss": 0.6318, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.5641337767348375e-06, |
|
"loss": 0.8347, |
|
"step": 1921 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.54672523772257e-06, |
|
"loss": 0.7416, |
|
"step": 1922 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.529356071100162e-06, |
|
"loss": 0.6922, |
|
"step": 1923 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.5120263087445785e-06, |
|
"loss": 0.6557, |
|
"step": 1924 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.494735982460487e-06, |
|
"loss": 0.8388, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.4774851239801815e-06, |
|
"loss": 0.8815, |
|
"step": 1926 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.460273764963509e-06, |
|
"loss": 0.6565, |
|
"step": 1927 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.443101936997836e-06, |
|
"loss": 0.7103, |
|
"step": 1928 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.4259696715979672e-06, |
|
"loss": 0.8535, |
|
"step": 1929 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.4088770002061095e-06, |
|
"loss": 0.6955, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3918239541918027e-06, |
|
"loss": 0.7694, |
|
"step": 1931 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3748105648518573e-06, |
|
"loss": 0.7971, |
|
"step": 1932 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.357836863410313e-06, |
|
"loss": 0.7688, |
|
"step": 1933 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.340902881018365e-06, |
|
"loss": 0.8168, |
|
"step": 1934 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3240086487543064e-06, |
|
"loss": 0.7653, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.3071541976235103e-06, |
|
"loss": 0.6909, |
|
"step": 1936 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.2903395585582974e-06, |
|
"loss": 0.7788, |
|
"step": 1937 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.273564762417952e-06, |
|
"loss": 0.7742, |
|
"step": 1938 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.2568298399886177e-06, |
|
"loss": 0.705, |
|
"step": 1939 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.240134821983287e-06, |
|
"loss": 0.6656, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.223479739041693e-06, |
|
"loss": 0.7305, |
|
"step": 1941 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.206864621730271e-06, |
|
"loss": 0.782, |
|
"step": 1942 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.1902895005421403e-06, |
|
"loss": 0.7546, |
|
"step": 1943 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.1737544058969894e-06, |
|
"loss": 0.9651, |
|
"step": 1944 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.157259368141066e-06, |
|
"loss": 0.8388, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.1408044175470748e-06, |
|
"loss": 0.7826, |
|
"step": 1946 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.1243895843141857e-06, |
|
"loss": 0.7102, |
|
"step": 1947 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.108014898567921e-06, |
|
"loss": 0.6304, |
|
"step": 1948 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.0916803903601243e-06, |
|
"loss": 0.6889, |
|
"step": 1949 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.075386089668911e-06, |
|
"loss": 0.7667, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.0591320263985925e-06, |
|
"loss": 0.7634, |
|
"step": 1951 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.042918230379646e-06, |
|
"loss": 0.777, |
|
"step": 1952 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.0267447313686414e-06, |
|
"loss": 0.8342, |
|
"step": 1953 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 3.0106115590481933e-06, |
|
"loss": 0.8288, |
|
"step": 1954 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.9945187430269113e-06, |
|
"loss": 0.688, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.978466312839323e-06, |
|
"loss": 0.7308, |
|
"step": 1956 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.9624542979458712e-06, |
|
"loss": 0.704, |
|
"step": 1957 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.9464827277327993e-06, |
|
"loss": 0.8687, |
|
"step": 1958 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.930551631512121e-06, |
|
"loss": 0.7719, |
|
"step": 1959 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 2.9146610385215793e-06, |
|
"loss": 0.7821, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.8988109779245958e-06, |
|
"loss": 0.7707, |
|
"step": 1961 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.883001478810185e-06, |
|
"loss": 0.7377, |
|
"step": 1962 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.867232570192932e-06, |
|
"loss": 0.7996, |
|
"step": 1963 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.8515042810129066e-06, |
|
"loss": 0.6835, |
|
"step": 1964 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.835816640135666e-06, |
|
"loss": 0.679, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.8201696763521385e-06, |
|
"loss": 0.7733, |
|
"step": 1966 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.804563418378614e-06, |
|
"loss": 0.6225, |
|
"step": 1967 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.788997894856671e-06, |
|
"loss": 0.7932, |
|
"step": 1968 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.77347313435313e-06, |
|
"loss": 0.6979, |
|
"step": 1969 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.7579891653600005e-06, |
|
"loss": 0.7177, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.7425460162944283e-06, |
|
"loss": 0.7344, |
|
"step": 1971 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.727143715498648e-06, |
|
"loss": 0.7756, |
|
"step": 1972 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.7117822912399228e-06, |
|
"loss": 0.6967, |
|
"step": 1973 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.6964617717104928e-06, |
|
"loss": 0.6205, |
|
"step": 1974 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.681182185027545e-06, |
|
"loss": 0.7575, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.665943559233125e-06, |
|
"loss": 0.6713, |
|
"step": 1976 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.6507459222941096e-06, |
|
"loss": 0.8165, |
|
"step": 1977 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.6355893021021426e-06, |
|
"loss": 0.6022, |
|
"step": 1978 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.620473726473616e-06, |
|
"loss": 0.7803, |
|
"step": 1979 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.605399223149574e-06, |
|
"loss": 0.6947, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.590365819795676e-06, |
|
"loss": 0.8219, |
|
"step": 1981 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.5753735440021744e-06, |
|
"loss": 0.7632, |
|
"step": 1982 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.5604224232838287e-06, |
|
"loss": 0.6795, |
|
"step": 1983 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 2.545512485079865e-06, |
|
"loss": 0.6966, |
|
"step": 1984 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.5306437567539367e-06, |
|
"loss": 0.8175, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.515816265594062e-06, |
|
"loss": 0.7078, |
|
"step": 1986 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.5010300388125796e-06, |
|
"loss": 0.7445, |
|
"step": 1987 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.4862851035460932e-06, |
|
"loss": 0.832, |
|
"step": 1988 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.4715814868554314e-06, |
|
"loss": 0.7203, |
|
"step": 1989 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.456919215725592e-06, |
|
"loss": 0.8136, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.442298317065686e-06, |
|
"loss": 0.715, |
|
"step": 1991 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.427718817708899e-06, |
|
"loss": 0.6885, |
|
"step": 1992 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.413180744412449e-06, |
|
"loss": 0.7855, |
|
"step": 1993 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3986841238575087e-06, |
|
"loss": 0.7622, |
|
"step": 1994 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3842289826491807e-06, |
|
"loss": 0.7651, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3698153473164397e-06, |
|
"loss": 0.7747, |
|
"step": 1996 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3554432443121e-06, |
|
"loss": 0.6726, |
|
"step": 1997 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3411127000127405e-06, |
|
"loss": 0.7775, |
|
"step": 1998 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.3268237407186618e-06, |
|
"loss": 0.7703, |
|
"step": 1999 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 2.312576392653862e-06, |
|
"loss": 0.7135, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"eval_loss": 0.7643298506736755, |
|
"eval_runtime": 10.1821, |
|
"eval_samples_per_second": 3.634, |
|
"eval_steps_per_second": 0.491, |
|
"step": 2000 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 2320, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"total_flos": 2.812517994725376e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
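Editorial note (not part of the trainer state itself): assuming this file is saved under the usual Hugging Face Trainer name trainer_state.json, the log_history entries above (per-step "loss" and "learning_rate", plus "eval_loss" every 100 steps per eval_steps) can be inspected with a minimal sketch like the following; the file path and the plotting choices are assumptions, not part of the original record.

# Minimal sketch: read log_history from a saved trainer state and plot the curves.
# Assumes the file is named "trainer_state.json"; adjust the path to your checkpoint.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Training entries carry "loss"; evaluation entries carry "eval_loss" (every eval_steps).
train = [e for e in state["log_history"] if "loss" in e]
evals = [e for e in state["log_history"] if "eval_loss" in e]

plt.plot([e["step"] for e in train], [e["loss"] for e in train], label="train loss")
plt.plot([e["step"] for e in evals], [e["eval_loss"] for e in evals], marker="o", label="eval loss")
plt.xlabel("step")
plt.ylabel("loss")
plt.legend()
plt.show()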
|