{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "global_step": 714230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9964997269787046e-05, |
|
"loss": 4.3041, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.992999453957409e-05, |
|
"loss": 4.113, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.989499180936113e-05, |
|
"loss": 4.0791, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.985998907914818e-05, |
|
"loss": 4.0661, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.982498634893522e-05, |
|
"loss": 4.0541, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9789983618722267e-05, |
|
"loss": 4.0411, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.9754980888509304e-05, |
|
"loss": 4.0271, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.971997815829635e-05, |
|
"loss": 4.0247, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.968497542808339e-05, |
|
"loss": 4.0151, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9649972697870436e-05, |
|
"loss": 4.0121, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.961496996765748e-05, |
|
"loss": 4.0123, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9579967237444524e-05, |
|
"loss": 4.0137, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.954496450723157e-05, |
|
"loss": 3.996, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.950996177701861e-05, |
|
"loss": 3.9928, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.947495904680565e-05, |
|
"loss": 3.9929, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.943995631659269e-05, |
|
"loss": 3.9941, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.940495358637974e-05, |
|
"loss": 3.9848, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.936995085616679e-05, |
|
"loss": 3.9808, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.9334948125953825e-05, |
|
"loss": 3.9829, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.929994539574087e-05, |
|
"loss": 3.9842, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.926494266552791e-05, |
|
"loss": 3.9757, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.922993993531496e-05, |
|
"loss": 3.9731, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9194937205102e-05, |
|
"loss": 3.9698, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.915993447488904e-05, |
|
"loss": 3.9659, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.912493174467609e-05, |
|
"loss": 3.9634, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.908992901446313e-05, |
|
"loss": 3.968, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.905492628425018e-05, |
|
"loss": 3.9598, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.9019923554037215e-05, |
|
"loss": 3.9563, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.898492082382426e-05, |
|
"loss": 3.9652, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.89499180936113e-05, |
|
"loss": 3.9558, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.891491536339835e-05, |
|
"loss": 3.9535, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.887991263318539e-05, |
|
"loss": 3.949, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.8844909902972435e-05, |
|
"loss": 3.9575, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.880990717275948e-05, |
|
"loss": 3.9468, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 4.877490444254652e-05, |
|
"loss": 3.948, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.873990171233356e-05, |
|
"loss": 3.9439, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8704898982120604e-05, |
|
"loss": 3.9537, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.866989625190765e-05, |
|
"loss": 3.9518, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.86348935216947e-05, |
|
"loss": 3.9378, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8599890791481736e-05, |
|
"loss": 3.9443, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"eval_loss": 3.860203266143799, |
|
"eval_runtime": 181.0236, |
|
"eval_samples_per_second": 222.684, |
|
"eval_steps_per_second": 18.561, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.856488806126878e-05, |
|
"loss": 3.9419, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8529885331055824e-05, |
|
"loss": 3.932, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.849488260084287e-05, |
|
"loss": 3.9433, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.845987987062991e-05, |
|
"loss": 3.9353, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.842487714041695e-05, |
|
"loss": 3.9361, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8389874410204e-05, |
|
"loss": 3.9324, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8354871679991044e-05, |
|
"loss": 3.9351, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.831986894977809e-05, |
|
"loss": 3.9233, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 4.8284866219565125e-05, |
|
"loss": 3.9378, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.824986348935217e-05, |
|
"loss": 3.9337, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.8214860759139214e-05, |
|
"loss": 3.9281, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.8179858028926264e-05, |
|
"loss": 3.9264, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.81448552987133e-05, |
|
"loss": 3.9248, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.8109852568500346e-05, |
|
"loss": 3.9278, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.807484983828739e-05, |
|
"loss": 3.9227, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.8039847108074434e-05, |
|
"loss": 3.9262, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.800484437786148e-05, |
|
"loss": 3.9247, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.7969841647648515e-05, |
|
"loss": 3.9132, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.7934838917435566e-05, |
|
"loss": 3.9221, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.789983618722261e-05, |
|
"loss": 3.9179, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.786483345700965e-05, |
|
"loss": 3.9187, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.782983072679669e-05, |
|
"loss": 3.9134, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.7794827996583735e-05, |
|
"loss": 3.9129, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 4.775982526637078e-05, |
|
"loss": 3.9129, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.772482253615782e-05, |
|
"loss": 3.9125, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.768981980594487e-05, |
|
"loss": 3.9141, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.765481707573191e-05, |
|
"loss": 3.9118, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7619814345518955e-05, |
|
"loss": 3.9151, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7584811615306e-05, |
|
"loss": 3.9148, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7549808885093036e-05, |
|
"loss": 3.9083, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.751480615488008e-05, |
|
"loss": 3.9019, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7479803424667124e-05, |
|
"loss": 3.911, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7444800694454175e-05, |
|
"loss": 3.9007, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.740979796424121e-05, |
|
"loss": 3.9045, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7374795234028256e-05, |
|
"loss": 3.8983, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.73397925038153e-05, |
|
"loss": 3.9049, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.7304789773602344e-05, |
|
"loss": 3.9066, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.726978704338939e-05, |
|
"loss": 3.8962, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.7234784313176426e-05, |
|
"loss": 3.904, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.7199781582963477e-05, |
|
"loss": 3.903, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"eval_loss": 3.829373836517334, |
|
"eval_runtime": 181.3502, |
|
"eval_samples_per_second": 222.283, |
|
"eval_steps_per_second": 18.528, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.716477885275052e-05, |
|
"loss": 3.8959, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.712977612253756e-05, |
|
"loss": 3.909, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.70947733923246e-05, |
|
"loss": 3.9008, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.7059770662111646e-05, |
|
"loss": 3.8976, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.702476793189869e-05, |
|
"loss": 3.8961, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.6989765201685734e-05, |
|
"loss": 3.8966, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.695476247147278e-05, |
|
"loss": 3.9003, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.691975974125982e-05, |
|
"loss": 3.8936, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.6884757011046866e-05, |
|
"loss": 3.8862, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.684975428083391e-05, |
|
"loss": 3.8988, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.681475155062095e-05, |
|
"loss": 3.8963, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 4.677974882040799e-05, |
|
"loss": 3.8896, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6744746090195035e-05, |
|
"loss": 3.8937, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6709743359982086e-05, |
|
"loss": 3.8854, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.667474062976912e-05, |
|
"loss": 3.8928, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.663973789955617e-05, |
|
"loss": 3.8937, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.660473516934321e-05, |
|
"loss": 3.8947, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6569732439130255e-05, |
|
"loss": 3.8899, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.65347297089173e-05, |
|
"loss": 3.8875, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6499726978704337e-05, |
|
"loss": 3.8816, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.646472424849139e-05, |
|
"loss": 3.8923, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.642972151827843e-05, |
|
"loss": 3.8914, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6394718788065475e-05, |
|
"loss": 3.8815, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.635971605785251e-05, |
|
"loss": 3.8878, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.632471332763956e-05, |
|
"loss": 3.8865, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.62897105974266e-05, |
|
"loss": 3.8817, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 4.6254707867213645e-05, |
|
"loss": 3.8787, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.621970513700069e-05, |
|
"loss": 3.8847, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.618470240678773e-05, |
|
"loss": 3.8908, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.614969967657478e-05, |
|
"loss": 3.8768, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.611469694636182e-05, |
|
"loss": 3.8864, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.607969421614886e-05, |
|
"loss": 3.887, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.60446914859359e-05, |
|
"loss": 3.8856, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.6009688755722946e-05, |
|
"loss": 3.8804, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.597468602551e-05, |
|
"loss": 3.8896, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.5939683295297034e-05, |
|
"loss": 3.873, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.590468056508408e-05, |
|
"loss": 3.881, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.586967783487112e-05, |
|
"loss": 3.8758, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.5834675104658166e-05, |
|
"loss": 3.8766, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.579967237444521e-05, |
|
"loss": 3.8758, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"eval_loss": 3.810927152633667, |
|
"eval_runtime": 180.6348, |
|
"eval_samples_per_second": 223.163, |
|
"eval_steps_per_second": 18.601, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 4.576466964423225e-05, |
|
"loss": 3.8766, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.57296669140193e-05, |
|
"loss": 3.8799, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.569466418380634e-05, |
|
"loss": 3.8775, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.5659661453593386e-05, |
|
"loss": 3.8678, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.5624658723380424e-05, |
|
"loss": 3.8907, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.558965599316747e-05, |
|
"loss": 3.8713, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.555465326295451e-05, |
|
"loss": 3.8703, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.5519650532741556e-05, |
|
"loss": 3.8774, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.54846478025286e-05, |
|
"loss": 3.8669, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.5449645072315644e-05, |
|
"loss": 3.876, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.541464234210269e-05, |
|
"loss": 3.8745, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.537963961188973e-05, |
|
"loss": 3.8622, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.534463688167677e-05, |
|
"loss": 3.8757, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.530963415146381e-05, |
|
"loss": 3.8675, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 4.527463142125086e-05, |
|
"loss": 3.8642, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.523962869103791e-05, |
|
"loss": 3.8696, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.5204625960824945e-05, |
|
"loss": 3.8717, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.516962323061199e-05, |
|
"loss": 3.8685, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.513462050039903e-05, |
|
"loss": 3.8668, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.509961777018608e-05, |
|
"loss": 3.8664, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.506461503997312e-05, |
|
"loss": 3.874, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.502961230976016e-05, |
|
"loss": 3.8717, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.499460957954721e-05, |
|
"loss": 3.8747, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.495960684933425e-05, |
|
"loss": 3.8654, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.49246041191213e-05, |
|
"loss": 3.8667, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.4889601388908334e-05, |
|
"loss": 3.8598, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.485459865869538e-05, |
|
"loss": 3.8614, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.481959592848242e-05, |
|
"loss": 3.8645, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 4.4784593198269466e-05, |
|
"loss": 3.864, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.474959046805651e-05, |
|
"loss": 3.8587, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4714587737843555e-05, |
|
"loss": 3.8551, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.46795850076306e-05, |
|
"loss": 3.8631, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.464458227741764e-05, |
|
"loss": 3.8598, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.460957954720468e-05, |
|
"loss": 3.8644, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4574576816991724e-05, |
|
"loss": 3.8555, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4539574086778775e-05, |
|
"loss": 3.865, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.450457135656582e-05, |
|
"loss": 3.8631, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4469568626352856e-05, |
|
"loss": 3.8587, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.44345658961399e-05, |
|
"loss": 3.8624, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4399563165926944e-05, |
|
"loss": 3.8614, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"eval_loss": 3.797558307647705, |
|
"eval_runtime": 180.6466, |
|
"eval_samples_per_second": 223.148, |
|
"eval_steps_per_second": 18.6, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.436456043571399e-05, |
|
"loss": 3.8477, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.432955770550103e-05, |
|
"loss": 3.8574, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.4294554975288076e-05, |
|
"loss": 3.8631, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 4.425955224507512e-05, |
|
"loss": 3.8539, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.4224549514862164e-05, |
|
"loss": 3.8609, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.418954678464921e-05, |
|
"loss": 3.8519, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.4154544054436245e-05, |
|
"loss": 3.8579, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.411954132422329e-05, |
|
"loss": 3.8534, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.408453859401033e-05, |
|
"loss": 3.8487, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.4049535863797384e-05, |
|
"loss": 3.8589, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.401453313358442e-05, |
|
"loss": 3.8543, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.3979530403371465e-05, |
|
"loss": 3.86, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.394452767315851e-05, |
|
"loss": 3.8569, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.3909524942945553e-05, |
|
"loss": 3.8575, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.387452221273259e-05, |
|
"loss": 3.8564, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.3839519482519635e-05, |
|
"loss": 3.8528, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.3804516752306685e-05, |
|
"loss": 3.8548, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 4.376951402209373e-05, |
|
"loss": 3.8523, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.373451129188077e-05, |
|
"loss": 3.8464, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.369950856166781e-05, |
|
"loss": 3.8531, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3664505831454855e-05, |
|
"loss": 3.8546, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.36295031012419e-05, |
|
"loss": 3.8541, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.359450037102894e-05, |
|
"loss": 3.8543, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.355949764081599e-05, |
|
"loss": 3.843, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.352449491060303e-05, |
|
"loss": 3.8417, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3489492180390075e-05, |
|
"loss": 3.8557, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.345448945017712e-05, |
|
"loss": 3.8442, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3419486719964156e-05, |
|
"loss": 3.8476, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.33844839897512e-05, |
|
"loss": 3.8586, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3349481259538244e-05, |
|
"loss": 3.8588, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.3314478529325295e-05, |
|
"loss": 3.8463, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.327947579911233e-05, |
|
"loss": 3.8481, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.3244473068899376e-05, |
|
"loss": 3.848, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.320947033868642e-05, |
|
"loss": 3.847, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.3174467608473464e-05, |
|
"loss": 3.8523, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.31394648782605e-05, |
|
"loss": 3.8496, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.3104462148047546e-05, |
|
"loss": 3.846, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.3069459417834596e-05, |
|
"loss": 3.8335, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.303445668762164e-05, |
|
"loss": 3.8494, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.299945395740868e-05, |
|
"loss": 3.8441, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 3.788637399673462, |
|
"eval_runtime": 182.4163, |
|
"eval_samples_per_second": 220.983, |
|
"eval_steps_per_second": 18.419, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.296445122719572e-05, |
|
"loss": 3.8351, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.2929448496982766e-05, |
|
"loss": 3.8423, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.289444576676981e-05, |
|
"loss": 3.8398, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.2859443036556854e-05, |
|
"loss": 3.841, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.28244403063439e-05, |
|
"loss": 3.8411, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.278943757613094e-05, |
|
"loss": 3.8388, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 4.2754434845917986e-05, |
|
"loss": 3.8472, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.271943211570503e-05, |
|
"loss": 3.841, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.268442938549207e-05, |
|
"loss": 3.8402, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.264942665527911e-05, |
|
"loss": 3.845, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.2614423925066155e-05, |
|
"loss": 3.8442, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.2579421194853206e-05, |
|
"loss": 3.8397, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.254441846464024e-05, |
|
"loss": 3.84, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.250941573442729e-05, |
|
"loss": 3.832, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.247441300421433e-05, |
|
"loss": 3.8386, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.2439410274001375e-05, |
|
"loss": 3.8431, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.240440754378842e-05, |
|
"loss": 3.8362, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.2369404813575456e-05, |
|
"loss": 3.8429, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.233440208336251e-05, |
|
"loss": 3.8358, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.229939935314955e-05, |
|
"loss": 3.8403, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 4.226439662293659e-05, |
|
"loss": 3.8354, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.222939389272363e-05, |
|
"loss": 3.8385, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.2194391162510677e-05, |
|
"loss": 3.8427, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.215938843229772e-05, |
|
"loss": 3.8359, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.2124385702084765e-05, |
|
"loss": 3.8363, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.208938297187181e-05, |
|
"loss": 3.8394, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.205438024165885e-05, |
|
"loss": 3.8312, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.2019377511445897e-05, |
|
"loss": 3.8373, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.198437478123294e-05, |
|
"loss": 3.8337, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.194937205101998e-05, |
|
"loss": 3.8302, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.191436932080702e-05, |
|
"loss": 3.8413, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.1879366590594066e-05, |
|
"loss": 3.8328, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.184436386038112e-05, |
|
"loss": 3.8381, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.1809361130168154e-05, |
|
"loss": 3.8419, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 4.17743583999552e-05, |
|
"loss": 3.8368, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.173935566974224e-05, |
|
"loss": 3.8347, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.1704352939529286e-05, |
|
"loss": 3.8395, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.166935020931633e-05, |
|
"loss": 3.8337, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.163434747910337e-05, |
|
"loss": 3.8256, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.159934474889042e-05, |
|
"loss": 3.8384, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 3.7787821292877197, |
|
"eval_runtime": 180.932, |
|
"eval_samples_per_second": 222.796, |
|
"eval_steps_per_second": 18.571, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.156434201867746e-05, |
|
"loss": 3.8271, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.1529339288464506e-05, |
|
"loss": 3.8378, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.149433655825154e-05, |
|
"loss": 3.8352, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.145933382803859e-05, |
|
"loss": 3.8364, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.142433109782563e-05, |
|
"loss": 3.8263, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.1389328367612675e-05, |
|
"loss": 3.8312, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.135432563739972e-05, |
|
"loss": 3.8375, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.1319322907186763e-05, |
|
"loss": 3.8278, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 4.128432017697381e-05, |
|
"loss": 3.8329, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.124931744676085e-05, |
|
"loss": 3.8269, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.121431471654789e-05, |
|
"loss": 3.8305, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.117931198633493e-05, |
|
"loss": 3.8237, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.114430925612198e-05, |
|
"loss": 3.8303, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.110930652590903e-05, |
|
"loss": 3.8248, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.1074303795696065e-05, |
|
"loss": 3.8353, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.103930106548311e-05, |
|
"loss": 3.8337, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.100429833527015e-05, |
|
"loss": 3.8246, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.09692956050572e-05, |
|
"loss": 3.8216, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.093429287484424e-05, |
|
"loss": 3.8326, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.0899290144631285e-05, |
|
"loss": 3.831, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.086428741441833e-05, |
|
"loss": 3.823, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.082928468420537e-05, |
|
"loss": 3.8296, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.079428195399242e-05, |
|
"loss": 3.8282, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 4.0759279223779454e-05, |
|
"loss": 3.8346, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.07242764935665e-05, |
|
"loss": 3.8352, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.068927376335354e-05, |
|
"loss": 3.8253, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0654271033140586e-05, |
|
"loss": 3.8289, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.061926830292763e-05, |
|
"loss": 3.8289, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0584265572714674e-05, |
|
"loss": 3.8242, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.054926284250172e-05, |
|
"loss": 3.8281, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.051426011228876e-05, |
|
"loss": 3.8155, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.04792573820758e-05, |
|
"loss": 3.8252, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0444254651862844e-05, |
|
"loss": 3.8203, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0409251921649894e-05, |
|
"loss": 3.824, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.037424919143694e-05, |
|
"loss": 3.8198, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0339246461223976e-05, |
|
"loss": 3.8241, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.030424373101102e-05, |
|
"loss": 3.8289, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 4.0269241000798064e-05, |
|
"loss": 3.8253, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.023423827058511e-05, |
|
"loss": 3.821, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.019923554037215e-05, |
|
"loss": 3.8249, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"eval_loss": 3.771368980407715, |
|
"eval_runtime": 180.7357, |
|
"eval_samples_per_second": 223.038, |
|
"eval_steps_per_second": 18.591, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.0164232810159196e-05, |
|
"loss": 3.8255, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.012923007994624e-05, |
|
"loss": 3.8194, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.0094227349733284e-05, |
|
"loss": 3.8242, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.005922461952033e-05, |
|
"loss": 3.8226, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 4.0024221889307365e-05, |
|
"loss": 3.821, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.998921915909441e-05, |
|
"loss": 3.8194, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.995421642888145e-05, |
|
"loss": 3.8112, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.9919213698668504e-05, |
|
"loss": 3.8205, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.988421096845554e-05, |
|
"loss": 3.8162, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.9849208238242585e-05, |
|
"loss": 3.8232, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.981420550802963e-05, |
|
"loss": 3.812, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 3.977920277781667e-05, |
|
"loss": 3.8194, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.974420004760371e-05, |
|
"loss": 3.8119, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9709197317390754e-05, |
|
"loss": 3.8117, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9674194587177805e-05, |
|
"loss": 3.8242, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.963919185696485e-05, |
|
"loss": 3.8125, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9604189126751887e-05, |
|
"loss": 3.8162, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.956918639653893e-05, |
|
"loss": 3.8126, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9534183666325975e-05, |
|
"loss": 3.8209, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.949918093611302e-05, |
|
"loss": 3.8226, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.946417820590006e-05, |
|
"loss": 3.8127, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.942917547568711e-05, |
|
"loss": 3.8151, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.939417274547415e-05, |
|
"loss": 3.8167, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9359170015261195e-05, |
|
"loss": 3.8106, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.932416728504824e-05, |
|
"loss": 3.8236, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.9289164554835276e-05, |
|
"loss": 3.8167, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 3.925416182462232e-05, |
|
"loss": 3.8219, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9219159094409364e-05, |
|
"loss": 3.8201, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9184156364196415e-05, |
|
"loss": 3.8139, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.914915363398345e-05, |
|
"loss": 3.82, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9114150903770496e-05, |
|
"loss": 3.8084, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.907914817355754e-05, |
|
"loss": 3.8134, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.9044145443344584e-05, |
|
"loss": 3.8159, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.900914271313162e-05, |
|
"loss": 3.8125, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.8974139982918665e-05, |
|
"loss": 3.8175, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.8939137252705716e-05, |
|
"loss": 3.8198, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.890413452249276e-05, |
|
"loss": 3.8158, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.88691317922798e-05, |
|
"loss": 3.8124, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.883412906206684e-05, |
|
"loss": 3.8133, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.8799126331853885e-05, |
|
"loss": 3.8122, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"eval_loss": 3.7651710510253906, |
|
"eval_runtime": 181.7539, |
|
"eval_samples_per_second": 221.789, |
|
"eval_steps_per_second": 18.487, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 3.876412360164093e-05, |
|
"loss": 3.8118, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.8729120871427973e-05, |
|
"loss": 3.8135, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.869411814121502e-05, |
|
"loss": 3.8208, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.865911541100206e-05, |
|
"loss": 3.8154, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.8624112680789106e-05, |
|
"loss": 3.8193, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.858910995057615e-05, |
|
"loss": 3.8063, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.855410722036319e-05, |
|
"loss": 3.8104, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.851910449015023e-05, |
|
"loss": 3.8108, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.8484101759937275e-05, |
|
"loss": 3.8145, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.8449099029724326e-05, |
|
"loss": 3.8101, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.841409629951136e-05, |
|
"loss": 3.8102, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.837909356929841e-05, |
|
"loss": 3.8208, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.834409083908545e-05, |
|
"loss": 3.8152, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.8309088108872495e-05, |
|
"loss": 3.8183, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 3.827408537865953e-05, |
|
"loss": 3.8108, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.8239082648446576e-05, |
|
"loss": 3.8077, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.820407991823363e-05, |
|
"loss": 3.8078, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.816907718802067e-05, |
|
"loss": 3.8074, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.813407445780771e-05, |
|
"loss": 3.8135, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.809907172759475e-05, |
|
"loss": 3.8093, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.8064068997381796e-05, |
|
"loss": 3.8097, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.802906626716884e-05, |
|
"loss": 3.8182, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.7994063536955884e-05, |
|
"loss": 3.8122, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.795906080674293e-05, |
|
"loss": 3.8019, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.792405807652997e-05, |
|
"loss": 3.8061, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.7889055346317016e-05, |
|
"loss": 3.81, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.785405261610406e-05, |
|
"loss": 3.8059, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.78190498858911e-05, |
|
"loss": 3.8048, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 3.778404715567814e-05, |
|
"loss": 3.808, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.7749044425465186e-05, |
|
"loss": 3.806, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.7714041695252236e-05, |
|
"loss": 3.8109, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.7679038965039274e-05, |
|
"loss": 3.8073, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.764403623482632e-05, |
|
"loss": 3.803, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.760903350461336e-05, |
|
"loss": 3.8009, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.7574030774400406e-05, |
|
"loss": 3.8101, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.753902804418745e-05, |
|
"loss": 3.8027, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.750402531397449e-05, |
|
"loss": 3.8064, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.746902258376154e-05, |
|
"loss": 3.8025, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.743401985354858e-05, |
|
"loss": 3.7995, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.739901712333562e-05, |
|
"loss": 3.8068, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"eval_loss": 3.759101152420044, |
|
"eval_runtime": 182.9179, |
|
"eval_samples_per_second": 220.378, |
|
"eval_steps_per_second": 18.369, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.736401439312266e-05, |
|
"loss": 3.7984, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.732901166290971e-05, |
|
"loss": 3.8079, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.729400893269675e-05, |
|
"loss": 3.7971, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 3.7259006202483795e-05, |
|
"loss": 3.8016, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.722400347227084e-05, |
|
"loss": 3.7943, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.718900074205788e-05, |
|
"loss": 3.8015, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.715399801184493e-05, |
|
"loss": 3.8126, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.711899528163197e-05, |
|
"loss": 3.8064, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.708399255141901e-05, |
|
"loss": 3.8046, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.704898982120605e-05, |
|
"loss": 3.8051, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.70139870909931e-05, |
|
"loss": 3.8032, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.697898436078015e-05, |
|
"loss": 3.799, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.6943981630567185e-05, |
|
"loss": 3.8088, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.690897890035423e-05, |
|
"loss": 3.8168, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.687397617014127e-05, |
|
"loss": 3.8034, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.683897343992832e-05, |
|
"loss": 3.8067, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.680397070971536e-05, |
|
"loss": 3.7976, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 3.6768967979502405e-05, |
|
"loss": 3.8125, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.673396524928945e-05, |
|
"loss": 3.8003, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.669896251907649e-05, |
|
"loss": 3.8002, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.666395978886353e-05, |
|
"loss": 3.8045, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.6628957058650574e-05, |
|
"loss": 3.8036, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.659395432843762e-05, |
|
"loss": 3.8017, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.655895159822466e-05, |
|
"loss": 3.796, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.6523948868011706e-05, |
|
"loss": 3.8091, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.648894613779875e-05, |
|
"loss": 3.7957, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.6453943407585794e-05, |
|
"loss": 3.8073, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.641894067737284e-05, |
|
"loss": 3.8021, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.638393794715988e-05, |
|
"loss": 3.8052, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.634893521694692e-05, |
|
"loss": 3.796, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.6313932486733963e-05, |
|
"loss": 3.8016, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 3.6278929756521014e-05, |
|
"loss": 3.8016, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.624392702630806e-05, |
|
"loss": 3.7951, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.6208924296095095e-05, |
|
"loss": 3.8039, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.617392156588214e-05, |
|
"loss": 3.7921, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.6138918835669184e-05, |
|
"loss": 3.8025, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.610391610545623e-05, |
|
"loss": 3.7928, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.606891337524327e-05, |
|
"loss": 3.7958, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.6033910645030316e-05, |
|
"loss": 3.8002, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.599890791481736e-05, |
|
"loss": 3.8021, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"eval_loss": 3.7523248195648193, |
|
"eval_runtime": 181.1441, |
|
"eval_samples_per_second": 222.535, |
|
"eval_steps_per_second": 18.549, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.5963905184604404e-05, |
|
"loss": 3.7979, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.592890245439145e-05, |
|
"loss": 3.7938, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.5893899724178485e-05, |
|
"loss": 3.8018, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.585889699396553e-05, |
|
"loss": 3.7925, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.582389426375257e-05, |
|
"loss": 3.7982, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.578889153353962e-05, |
|
"loss": 3.7997, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 3.575388880332666e-05, |
|
"loss": 3.8015, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5718886073113705e-05, |
|
"loss": 3.8017, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.568388334290075e-05, |
|
"loss": 3.7931, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.564888061268779e-05, |
|
"loss": 3.7946, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.561387788247483e-05, |
|
"loss": 3.7965, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5578875152261874e-05, |
|
"loss": 3.7946, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5543872422048925e-05, |
|
"loss": 3.7925, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.550886969183597e-05, |
|
"loss": 3.7934, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5473866961623006e-05, |
|
"loss": 3.8023, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.543886423141005e-05, |
|
"loss": 3.7931, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5403861501197094e-05, |
|
"loss": 3.7911, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.536885877098414e-05, |
|
"loss": 3.7991, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.533385604077118e-05, |
|
"loss": 3.801, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.5298853310558226e-05, |
|
"loss": 3.7951, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 3.526385058034527e-05, |
|
"loss": 3.7906, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.5228847850132314e-05, |
|
"loss": 3.7809, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.519384511991936e-05, |
|
"loss": 3.7916, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.5158842389706396e-05, |
|
"loss": 3.7941, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.512383965949344e-05, |
|
"loss": 3.7922, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.5088836929280484e-05, |
|
"loss": 3.7904, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.5053834199067535e-05, |
|
"loss": 3.7947, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.501883146885457e-05, |
|
"loss": 3.7828, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.4983828738641616e-05, |
|
"loss": 3.794, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.494882600842866e-05, |
|
"loss": 3.7928, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.4913823278215704e-05, |
|
"loss": 3.7843, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.487882054800274e-05, |
|
"loss": 3.7967, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.4843817817789785e-05, |
|
"loss": 3.7976, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.4808815087576836e-05, |
|
"loss": 3.7915, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 3.477381235736388e-05, |
|
"loss": 3.7942, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.473880962715092e-05, |
|
"loss": 3.7906, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.470380689693796e-05, |
|
"loss": 3.7929, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.4668804166725005e-05, |
|
"loss": 3.7997, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.463380143651205e-05, |
|
"loss": 3.7962, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.459879870629909e-05, |
|
"loss": 3.7867, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"eval_loss": 3.7492103576660156, |
|
"eval_runtime": 181.991, |
|
"eval_samples_per_second": 221.5, |
|
"eval_steps_per_second": 18.462, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.456379597608614e-05, |
|
"loss": 3.7849, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.452879324587318e-05, |
|
"loss": 3.799, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.4493790515660225e-05, |
|
"loss": 3.7958, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.445878778544727e-05, |
|
"loss": 3.7927, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.4423785055234307e-05, |
|
"loss": 3.795, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.438878232502135e-05, |
|
"loss": 3.8021, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.4353779594808395e-05, |
|
"loss": 3.793, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.4318776864595445e-05, |
|
"loss": 3.7882, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 3.428377413438248e-05, |
|
"loss": 3.7989, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.424877140416953e-05, |
|
"loss": 3.7865, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.421376867395657e-05, |
|
"loss": 3.7883, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.4178765943743615e-05, |
|
"loss": 3.7811, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.414376321353065e-05, |
|
"loss": 3.7851, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.4108760483317696e-05, |
|
"loss": 3.7883, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.407375775310475e-05, |
|
"loss": 3.7858, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.403875502289179e-05, |
|
"loss": 3.7912, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.400375229267883e-05, |
|
"loss": 3.7874, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.396874956246587e-05, |
|
"loss": 3.791, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.3933746832252916e-05, |
|
"loss": 3.7938, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.389874410203996e-05, |
|
"loss": 3.7871, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.3863741371827004e-05, |
|
"loss": 3.7901, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.382873864161405e-05, |
|
"loss": 3.7823, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.379373591140109e-05, |
|
"loss": 3.7971, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.3758733181188136e-05, |
|
"loss": 3.7919, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.372373045097518e-05, |
|
"loss": 3.785, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.368872772076222e-05, |
|
"loss": 3.7862, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.365372499054926e-05, |
|
"loss": 3.787, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.361872226033631e-05, |
|
"loss": 3.7923, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.3583719530123356e-05, |
|
"loss": 3.7858, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.3548716799910394e-05, |
|
"loss": 3.7919, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.351371406969744e-05, |
|
"loss": 3.7923, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.347871133948448e-05, |
|
"loss": 3.7881, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.3443708609271526e-05, |
|
"loss": 3.7899, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.340870587905856e-05, |
|
"loss": 3.7966, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.3373703148845614e-05, |
|
"loss": 3.7794, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.333870041863266e-05, |
|
"loss": 3.7874, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.33036976884197e-05, |
|
"loss": 3.7859, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 3.326869495820674e-05, |
|
"loss": 3.7856, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.323369222799378e-05, |
|
"loss": 3.7855, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 3.319868949778083e-05, |
|
"loss": 3.7921, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_loss": 3.744399309158325, |
|
"eval_runtime": 181.6817, |
|
"eval_samples_per_second": 221.877, |
|
"eval_steps_per_second": 18.494, |
|
"step": 240000 |
|
}, |
|
{
"epoch": 0.34,
"learning_rate": 3.316368676756787e-05,
"loss": 3.7826,
"step": 240500
},
{
"epoch": 0.34,
"learning_rate": 3.3128684037354915e-05,
"loss": 3.7798,
"step": 241000
},
{
"epoch": 0.34,
"learning_rate": 3.309368130714196e-05,
"loss": 3.7802,
"step": 241500
},
{
"epoch": 0.34,
"learning_rate": 3.3058678576929e-05,
"loss": 3.7929,
"step": 242000
},
{
"epoch": 0.34,
"learning_rate": 3.302367584671605e-05,
"loss": 3.7899,
"step": 242500
},
{
"epoch": 0.34,
"learning_rate": 3.298867311650309e-05,
"loss": 3.7904,
"step": 243000
},
{
"epoch": 0.34,
"learning_rate": 3.295367038629013e-05,
"loss": 3.7864,
"step": 243500
},
{
"epoch": 0.34,
"learning_rate": 3.291866765607717e-05,
"loss": 3.7829,
"step": 244000
},
{
"epoch": 0.34,
"learning_rate": 3.288366492586422e-05,
"loss": 3.7875,
"step": 244500
},
{
"epoch": 0.34,
"learning_rate": 3.284866219565127e-05,
"loss": 3.788,
"step": 245000
},
{
"epoch": 0.34,
"learning_rate": 3.2813659465438304e-05,
"loss": 3.7744,
"step": 245500
},
{
"epoch": 0.34,
"learning_rate": 3.277865673522535e-05,
"loss": 3.7826,
"step": 246000
},
{
"epoch": 0.35,
"learning_rate": 3.274365400501239e-05,
"loss": 3.7918,
"step": 246500
},
{
"epoch": 0.35,
"learning_rate": 3.2708651274799436e-05,
"loss": 3.7783,
"step": 247000
},
{
"epoch": 0.35,
"learning_rate": 3.2673648544586474e-05,
"loss": 3.7763,
"step": 247500
},
{
"epoch": 0.35,
"learning_rate": 3.2638645814373524e-05,
"loss": 3.7855,
"step": 248000
},
{
"epoch": 0.35,
"learning_rate": 3.260364308416057e-05,
"loss": 3.7848,
"step": 248500
},
{
"epoch": 0.35,
"learning_rate": 3.256864035394761e-05,
"loss": 3.7803,
"step": 249000
},
{
"epoch": 0.35,
"learning_rate": 3.253363762373465e-05,
"loss": 3.7828,
"step": 249500
},
{
"epoch": 0.35,
"learning_rate": 3.2498634893521694e-05,
"loss": 3.7818,
"step": 250000
},
{
"epoch": 0.35,
"learning_rate": 3.246363216330874e-05,
"loss": 3.7831,
"step": 250500
},
{
"epoch": 0.35,
"learning_rate": 3.242862943309578e-05,
"loss": 3.79,
"step": 251000
},
{
"epoch": 0.35,
"learning_rate": 3.2393626702882826e-05,
"loss": 3.7824,
"step": 251500
},
{
"epoch": 0.35,
"learning_rate": 3.235862397266987e-05,
"loss": 3.7819,
"step": 252000
},
{
"epoch": 0.35,
"learning_rate": 3.2323621242456914e-05,
"loss": 3.7877,
"step": 252500
},
{
"epoch": 0.35,
"learning_rate": 3.228861851224396e-05,
"loss": 3.7745,
"step": 253000
},
{
"epoch": 0.35,
"learning_rate": 3.2253615782031e-05,
"loss": 3.7848,
"step": 253500
},
{
"epoch": 0.36,
"learning_rate": 3.221861305181804e-05,
"loss": 3.7822,
"step": 254000
},
{
"epoch": 0.36,
"learning_rate": 3.218361032160508e-05,
"loss": 3.7804,
"step": 254500
},
{
"epoch": 0.36,
"learning_rate": 3.2148607591392134e-05,
"loss": 3.7798,
"step": 255000
},
{
"epoch": 0.36,
"learning_rate": 3.211360486117918e-05,
"loss": 3.7808,
"step": 255500
},
{
"epoch": 0.36,
"learning_rate": 3.2078602130966215e-05,
"loss": 3.7822,
"step": 256000
},
{
"epoch": 0.36,
"learning_rate": 3.204359940075326e-05,
"loss": 3.7831,
"step": 256500
},
{
"epoch": 0.36,
"learning_rate": 3.20085966705403e-05,
"loss": 3.7768,
"step": 257000
},
{
"epoch": 0.36,
"learning_rate": 3.197359394032735e-05,
"loss": 3.7753,
"step": 257500
},
{
"epoch": 0.36,
"learning_rate": 3.193859121011439e-05,
"loss": 3.785,
"step": 258000
},
{
"epoch": 0.36,
"learning_rate": 3.1903588479901435e-05,
"loss": 3.7777,
"step": 258500
},
{
"epoch": 0.36,
"learning_rate": 3.186858574968848e-05,
"loss": 3.7798,
"step": 259000
},
{
"epoch": 0.36,
"learning_rate": 3.1833583019475523e-05,
"loss": 3.7825,
"step": 259500
},
{
"epoch": 0.36,
"learning_rate": 3.179858028926256e-05,
"loss": 3.7812,
"step": 260000
},
{
"epoch": 0.36,
"eval_loss": 3.740469455718994,
"eval_runtime": 182.8779,
"eval_samples_per_second": 220.426,
"eval_steps_per_second": 18.373,
"step": 260000
},
{
"epoch": 0.36,
"learning_rate": 3.1763577559049605e-05,
"loss": 3.7821,
"step": 260500
},
{
"epoch": 0.37,
"learning_rate": 3.172857482883665e-05,
"loss": 3.7704,
"step": 261000
},
{
"epoch": 0.37,
"learning_rate": 3.169357209862369e-05,
"loss": 3.7791,
"step": 261500
},
{
"epoch": 0.37,
"learning_rate": 3.165856936841074e-05,
"loss": 3.7642,
"step": 262000
},
{
"epoch": 0.37,
"learning_rate": 3.162356663819778e-05,
"loss": 3.7788,
"step": 262500
},
{
"epoch": 0.37,
"learning_rate": 3.1588563907984825e-05,
"loss": 3.7789,
"step": 263000
},
{
"epoch": 0.37,
"learning_rate": 3.155356117777187e-05,
"loss": 3.7834,
"step": 263500
},
{
"epoch": 0.37,
"learning_rate": 3.151855844755891e-05,
"loss": 3.7739,
"step": 264000
},
{
"epoch": 0.37,
"learning_rate": 3.148355571734595e-05,
"loss": 3.7797,
"step": 264500
},
{
"epoch": 0.37,
"learning_rate": 3.1448552987132994e-05,
"loss": 3.7726,
"step": 265000
},
{
"epoch": 0.37,
"learning_rate": 3.1413550256920045e-05,
"loss": 3.7759,
"step": 265500
},
{
"epoch": 0.37,
"learning_rate": 3.137854752670709e-05,
"loss": 3.777,
"step": 266000
},
{
"epoch": 0.37,
"learning_rate": 3.1343544796494126e-05,
"loss": 3.7699,
"step": 266500
},
{
"epoch": 0.37,
"learning_rate": 3.130854206628117e-05,
"loss": 3.7698,
"step": 267000
},
{
"epoch": 0.37,
"learning_rate": 3.1273539336068214e-05,
"loss": 3.7803,
"step": 267500
},
{
"epoch": 0.38,
"learning_rate": 3.123853660585526e-05,
"loss": 3.7733,
"step": 268000
},
{
"epoch": 0.38,
"learning_rate": 3.12035338756423e-05,
"loss": 3.7741,
"step": 268500
},
{
"epoch": 0.38,
"learning_rate": 3.1168531145429346e-05,
"loss": 3.7708,
"step": 269000
},
{
"epoch": 0.38,
"learning_rate": 3.113352841521639e-05,
"loss": 3.7854,
"step": 269500
},
{
"epoch": 0.38,
"learning_rate": 3.1098525685003434e-05,
"loss": 3.7775,
"step": 270000
},
{
"epoch": 0.38,
"learning_rate": 3.106352295479048e-05,
"loss": 3.7632,
"step": 270500
},
{
"epoch": 0.38,
"learning_rate": 3.1028520224577516e-05,
"loss": 3.7771,
"step": 271000
},
{
"epoch": 0.38,
"learning_rate": 3.099351749436456e-05,
"loss": 3.7809,
"step": 271500
},
{
"epoch": 0.38,
"learning_rate": 3.0958514764151604e-05,
"loss": 3.7837,
"step": 272000
},
{
"epoch": 0.38,
"learning_rate": 3.092351203393865e-05,
"loss": 3.7755,
"step": 272500
},
{
"epoch": 0.38,
"learning_rate": 3.088850930372569e-05,
"loss": 3.7787,
"step": 273000
},
{
"epoch": 0.38,
"learning_rate": 3.0853506573512736e-05,
"loss": 3.776,
"step": 273500
},
{
"epoch": 0.38,
"learning_rate": 3.081850384329978e-05,
"loss": 3.7697,
"step": 274000
},
{
"epoch": 0.38,
"learning_rate": 3.0783501113086824e-05,
"loss": 3.7735,
"step": 274500
},
{
"epoch": 0.39,
"learning_rate": 3.074849838287386e-05,
"loss": 3.7773,
"step": 275000
},
{
"epoch": 0.39,
"learning_rate": 3.0713495652660905e-05,
"loss": 3.7735,
"step": 275500
},
{
"epoch": 0.39,
"learning_rate": 3.0678492922447956e-05,
"loss": 3.7763,
"step": 276000
},
{
"epoch": 0.39,
"learning_rate": 3.0643490192235e-05,
"loss": 3.7779,
"step": 276500
},
{
"epoch": 0.39,
"learning_rate": 3.060848746202204e-05,
"loss": 3.7748,
"step": 277000
},
{
"epoch": 0.39,
"learning_rate": 3.057348473180908e-05,
"loss": 3.7644,
"step": 277500
},
{
"epoch": 0.39,
"learning_rate": 3.0538482001596125e-05,
"loss": 3.7748,
"step": 278000
},
{
"epoch": 0.39,
"learning_rate": 3.050347927138317e-05,
"loss": 3.7756,
"step": 278500
},
{
"epoch": 0.39,
"learning_rate": 3.046847654117021e-05,
"loss": 3.775,
"step": 279000
},
{
"epoch": 0.39,
"learning_rate": 3.0433473810957257e-05,
"loss": 3.7691,
"step": 279500
},
{
"epoch": 0.39,
"learning_rate": 3.03984710807443e-05,
"loss": 3.7772,
"step": 280000
},
{
"epoch": 0.39,
"eval_loss": 3.7369792461395264,
"eval_runtime": 182.5434,
"eval_samples_per_second": 220.83,
"eval_steps_per_second": 18.407,
"step": 280000
},
{
"epoch": 0.39,
"learning_rate": 3.0363468350531342e-05,
"loss": 3.7743,
"step": 280500
},
{
"epoch": 0.39,
"learning_rate": 3.0328465620318386e-05,
"loss": 3.7721,
"step": 281000
},
{
"epoch": 0.39,
"learning_rate": 3.029346289010543e-05,
"loss": 3.774,
"step": 281500
},
{
"epoch": 0.39,
"learning_rate": 3.025846015989247e-05,
"loss": 3.7756,
"step": 282000
},
{
"epoch": 0.4,
"learning_rate": 3.0223457429679518e-05,
"loss": 3.7713,
"step": 282500
},
{
"epoch": 0.4,
"learning_rate": 3.0188454699466562e-05,
"loss": 3.7754,
"step": 283000
},
{
"epoch": 0.4,
"learning_rate": 3.0153451969253606e-05,
"loss": 3.7672,
"step": 283500
},
{
"epoch": 0.4,
"learning_rate": 3.0118449239040646e-05,
"loss": 3.7768,
"step": 284000
},
{
"epoch": 0.4,
"learning_rate": 3.008344650882769e-05,
"loss": 3.7734,
"step": 284500
},
{
"epoch": 0.4,
"learning_rate": 3.004844377861473e-05,
"loss": 3.7744,
"step": 285000
},
{
"epoch": 0.4,
"learning_rate": 3.0013441048401775e-05,
"loss": 3.7723,
"step": 285500
},
{
"epoch": 0.4,
"learning_rate": 2.9978438318188823e-05,
"loss": 3.7721,
"step": 286000
},
{
"epoch": 0.4,
"learning_rate": 2.9943435587975867e-05,
"loss": 3.7731,
"step": 286500
},
{
"epoch": 0.4,
"learning_rate": 2.9908432857762907e-05,
"loss": 3.7687,
"step": 287000
},
{
"epoch": 0.4,
"learning_rate": 2.987343012754995e-05,
"loss": 3.7721,
"step": 287500
},
{
"epoch": 0.4,
"learning_rate": 2.9838427397336992e-05,
"loss": 3.7723,
"step": 288000
},
{
"epoch": 0.4,
"learning_rate": 2.9803424667124036e-05,
"loss": 3.7637,
"step": 288500
},
{
"epoch": 0.4,
"learning_rate": 2.976842193691108e-05,
"loss": 3.76,
"step": 289000
},
{
"epoch": 0.41,
"learning_rate": 2.9733419206698127e-05,
"loss": 3.774,
"step": 289500
},
{
"epoch": 0.41,
"learning_rate": 2.9698416476485168e-05,
"loss": 3.7761,
"step": 290000
},
{
"epoch": 0.41,
"learning_rate": 2.9663413746272212e-05,
"loss": 3.7703,
"step": 290500
},
{
"epoch": 0.41,
"learning_rate": 2.9628411016059256e-05,
"loss": 3.7649,
"step": 291000
},
{
"epoch": 0.41,
"learning_rate": 2.9593408285846297e-05,
"loss": 3.7707,
"step": 291500
},
{
"epoch": 0.41,
"learning_rate": 2.955840555563334e-05,
"loss": 3.7638,
"step": 292000
},
{
"epoch": 0.41,
"learning_rate": 2.952340282542038e-05,
"loss": 3.7685,
"step": 292500
},
{
"epoch": 0.41,
"learning_rate": 2.948840009520743e-05,
"loss": 3.7673,
"step": 293000
},
{
"epoch": 0.41,
"learning_rate": 2.9453397364994473e-05,
"loss": 3.7811,
"step": 293500
},
{
"epoch": 0.41,
"learning_rate": 2.9418394634781517e-05,
"loss": 3.7743,
"step": 294000
},
{
"epoch": 0.41,
"learning_rate": 2.9383391904568557e-05,
"loss": 3.7675,
"step": 294500
},
{
"epoch": 0.41,
"learning_rate": 2.93483891743556e-05,
"loss": 3.7661,
"step": 295000
},
{
"epoch": 0.41,
"learning_rate": 2.9313386444142642e-05,
"loss": 3.7577,
"step": 295500
},
{
"epoch": 0.41,
"learning_rate": 2.9278383713929686e-05,
"loss": 3.7638,
"step": 296000
},
{
"epoch": 0.42,
"learning_rate": 2.9243380983716733e-05,
"loss": 3.7673,
"step": 296500
},
{
"epoch": 0.42,
"learning_rate": 2.9208378253503777e-05,
"loss": 3.7735,
"step": 297000
},
{
"epoch": 0.42,
"learning_rate": 2.9173375523290818e-05,
"loss": 3.7671,
"step": 297500
},
{
"epoch": 0.42,
"learning_rate": 2.9138372793077862e-05,
"loss": 3.7689,
"step": 298000
},
{
"epoch": 0.42,
"learning_rate": 2.9103370062864903e-05,
"loss": 3.7728,
"step": 298500
},
{
"epoch": 0.42,
"learning_rate": 2.9068367332651947e-05,
"loss": 3.7749,
"step": 299000
},
{
"epoch": 0.42,
"learning_rate": 2.903336460243899e-05,
"loss": 3.7739,
"step": 299500
},
{
"epoch": 0.42,
"learning_rate": 2.8998361872226038e-05,
"loss": 3.7617,
"step": 300000
},
{
"epoch": 0.42,
"eval_loss": 3.733321189880371,
"eval_runtime": 182.5498,
"eval_samples_per_second": 220.822,
"eval_steps_per_second": 18.406,
"step": 300000
},
{
"epoch": 0.42,
"learning_rate": 2.896335914201308e-05,
"loss": 3.7699,
"step": 300500
},
{
"epoch": 0.42,
"learning_rate": 2.8928356411800123e-05,
"loss": 3.7678,
"step": 301000
},
{
"epoch": 0.42,
"learning_rate": 2.8893353681587167e-05,
"loss": 3.7641,
"step": 301500
},
{
"epoch": 0.42,
"learning_rate": 2.8858350951374207e-05,
"loss": 3.7752,
"step": 302000
},
{
"epoch": 0.42,
"learning_rate": 2.882334822116125e-05,
"loss": 3.7639,
"step": 302500
},
{
"epoch": 0.42,
"learning_rate": 2.8788345490948292e-05,
"loss": 3.7675,
"step": 303000
},
{
"epoch": 0.42,
"learning_rate": 2.875334276073534e-05,
"loss": 3.7673,
"step": 303500
},
{
"epoch": 0.43,
"learning_rate": 2.8718340030522384e-05,
"loss": 3.7759,
"step": 304000
},
{
"epoch": 0.43,
"learning_rate": 2.8683337300309428e-05,
"loss": 3.7708,
"step": 304500
},
{
"epoch": 0.43,
"learning_rate": 2.8648334570096468e-05,
"loss": 3.7707,
"step": 305000
},
{
"epoch": 0.43,
"learning_rate": 2.8613331839883512e-05,
"loss": 3.7654,
"step": 305500
},
{
"epoch": 0.43,
"learning_rate": 2.8578329109670553e-05,
"loss": 3.776,
"step": 306000
},
{
"epoch": 0.43,
"learning_rate": 2.8543326379457597e-05,
"loss": 3.7603,
"step": 306500
},
{
"epoch": 0.43,
"learning_rate": 2.8508323649244644e-05,
"loss": 3.7648,
"step": 307000
},
{
"epoch": 0.43,
"learning_rate": 2.8473320919031688e-05,
"loss": 3.7644,
"step": 307500
},
{
"epoch": 0.43,
"learning_rate": 2.843831818881873e-05,
"loss": 3.7688,
"step": 308000
},
{
"epoch": 0.43,
"learning_rate": 2.8403315458605773e-05,
"loss": 3.7645,
"step": 308500
},
{
"epoch": 0.43,
"learning_rate": 2.8368312728392814e-05,
"loss": 3.7613,
"step": 309000
},
{
"epoch": 0.43,
"learning_rate": 2.8333309998179858e-05,
"loss": 3.7697,
"step": 309500
},
{
"epoch": 0.43,
"learning_rate": 2.82983072679669e-05,
"loss": 3.7655,
"step": 310000
},
{
"epoch": 0.43,
"learning_rate": 2.826330453775395e-05,
"loss": 3.7665,
"step": 310500
},
{
"epoch": 0.44,
"learning_rate": 2.822830180754099e-05,
"loss": 3.771,
"step": 311000
},
{
"epoch": 0.44,
"learning_rate": 2.8193299077328034e-05,
"loss": 3.7667,
"step": 311500
},
{
"epoch": 0.44,
"learning_rate": 2.8158296347115078e-05,
"loss": 3.7703,
"step": 312000
},
{
"epoch": 0.44,
"learning_rate": 2.812329361690212e-05,
"loss": 3.774,
"step": 312500
},
{
"epoch": 0.44,
"learning_rate": 2.8088290886689162e-05,
"loss": 3.7567,
"step": 313000
},
{
"epoch": 0.44,
"learning_rate": 2.8053288156476203e-05,
"loss": 3.7658,
"step": 313500
},
{
"epoch": 0.44,
"learning_rate": 2.8018285426263254e-05,
"loss": 3.771,
"step": 314000
},
{
"epoch": 0.44,
"learning_rate": 2.7983282696050294e-05,
"loss": 3.7701,
"step": 314500
},
{
"epoch": 0.44,
"learning_rate": 2.794827996583734e-05,
"loss": 3.768,
"step": 315000
},
{
"epoch": 0.44,
"learning_rate": 2.791327723562438e-05,
"loss": 3.7666,
"step": 315500
},
{
"epoch": 0.44,
"learning_rate": 2.7878274505411423e-05,
"loss": 3.7758,
"step": 316000
},
{
"epoch": 0.44,
"learning_rate": 2.7843271775198464e-05,
"loss": 3.764,
"step": 316500
},
{
"epoch": 0.44,
"learning_rate": 2.7808269044985508e-05,
"loss": 3.7687,
"step": 317000
},
{
"epoch": 0.44,
"learning_rate": 2.7773266314772555e-05,
"loss": 3.7698,
"step": 317500
},
{
"epoch": 0.45,
"learning_rate": 2.77382635845596e-05,
"loss": 3.7666,
"step": 318000
},
{
"epoch": 0.45,
"learning_rate": 2.770326085434664e-05,
"loss": 3.7639,
"step": 318500
},
{
"epoch": 0.45,
"learning_rate": 2.7668258124133684e-05,
"loss": 3.7621,
"step": 319000
},
{
"epoch": 0.45,
"learning_rate": 2.7633255393920728e-05,
"loss": 3.7605,
"step": 319500
},
{
"epoch": 0.45,
"learning_rate": 2.759825266370777e-05,
"loss": 3.7663,
"step": 320000
},
{
"epoch": 0.45,
"eval_loss": 3.7304601669311523,
"eval_runtime": 181.9514,
"eval_samples_per_second": 221.548,
"eval_steps_per_second": 18.466,
"step": 320000
},
{
"epoch": 0.45,
"learning_rate": 2.7563249933494812e-05,
"loss": 3.76,
"step": 320500
},
{
"epoch": 0.45,
"learning_rate": 2.752824720328186e-05,
"loss": 3.7621,
"step": 321000
},
{
"epoch": 0.45,
"learning_rate": 2.74932444730689e-05,
"loss": 3.7578,
"step": 321500
},
{
"epoch": 0.45,
"learning_rate": 2.7458241742855945e-05,
"loss": 3.7628,
"step": 322000
},
{
"epoch": 0.45,
"learning_rate": 2.742323901264299e-05,
"loss": 3.7616,
"step": 322500
},
{
"epoch": 0.45,
"learning_rate": 2.738823628243003e-05,
"loss": 3.7746,
"step": 323000
},
{
"epoch": 0.45,
"learning_rate": 2.7353233552217073e-05,
"loss": 3.75,
"step": 323500
},
{
"epoch": 0.45,
"learning_rate": 2.7318230822004114e-05,
"loss": 3.7669,
"step": 324000
},
{
"epoch": 0.45,
"learning_rate": 2.7283228091791165e-05,
"loss": 3.7701,
"step": 324500
},
{
"epoch": 0.46,
"learning_rate": 2.7248225361578205e-05,
"loss": 3.762,
"step": 325000
},
{
"epoch": 0.46,
"learning_rate": 2.721322263136525e-05,
"loss": 3.7619,
"step": 325500
},
{
"epoch": 0.46,
"learning_rate": 2.717821990115229e-05,
"loss": 3.7564,
"step": 326000
},
{
"epoch": 0.46,
"learning_rate": 2.7143217170939334e-05,
"loss": 3.7552,
"step": 326500
},
{
"epoch": 0.46,
"learning_rate": 2.7108214440726375e-05,
"loss": 3.7657,
"step": 327000
},
{
"epoch": 0.46,
"learning_rate": 2.707321171051342e-05,
"loss": 3.7524,
"step": 327500
},
{
"epoch": 0.46,
"learning_rate": 2.7038208980300466e-05,
"loss": 3.7685,
"step": 328000
},
{
"epoch": 0.46,
"learning_rate": 2.700320625008751e-05,
"loss": 3.7611,
"step": 328500
},
{
"epoch": 0.46,
"learning_rate": 2.696820351987455e-05,
"loss": 3.7589,
"step": 329000
},
{
"epoch": 0.46,
"learning_rate": 2.6933200789661595e-05,
"loss": 3.7735,
"step": 329500
},
{
"epoch": 0.46,
"learning_rate": 2.689819805944864e-05,
"loss": 3.7613,
"step": 330000
},
{
"epoch": 0.46,
"learning_rate": 2.686319532923568e-05,
"loss": 3.7642,
"step": 330500
},
{
"epoch": 0.46,
"learning_rate": 2.6828192599022723e-05,
"loss": 3.7613,
"step": 331000
},
{
"epoch": 0.46,
"learning_rate": 2.679318986880977e-05,
"loss": 3.7577,
"step": 331500
},
{
"epoch": 0.46,
"learning_rate": 2.675818713859681e-05,
"loss": 3.7619,
"step": 332000
},
{
"epoch": 0.47,
"learning_rate": 2.6723184408383855e-05,
"loss": 3.7607,
"step": 332500
},
{
"epoch": 0.47,
"learning_rate": 2.66881816781709e-05,
"loss": 3.7593,
"step": 333000
},
{
"epoch": 0.47,
"learning_rate": 2.665317894795794e-05,
"loss": 3.7535,
"step": 333500
},
{
"epoch": 0.47,
"learning_rate": 2.6618176217744984e-05,
"loss": 3.7623,
"step": 334000
},
{
"epoch": 0.47,
"learning_rate": 2.658317348753203e-05,
"loss": 3.7521,
"step": 334500
},
{
"epoch": 0.47,
"learning_rate": 2.6548170757319076e-05,
"loss": 3.7554,
"step": 335000
},
{
"epoch": 0.47,
"learning_rate": 2.6513168027106116e-05,
"loss": 3.7645,
"step": 335500
},
{
"epoch": 0.47,
"learning_rate": 2.647816529689316e-05,
"loss": 3.7639,
"step": 336000
},
{
"epoch": 0.47,
"learning_rate": 2.64431625666802e-05,
"loss": 3.7559,
"step": 336500
},
{
"epoch": 0.47,
"learning_rate": 2.6408159836467245e-05,
"loss": 3.7621,
"step": 337000
},
{
"epoch": 0.47,
"learning_rate": 2.6373157106254285e-05,
"loss": 3.7576,
"step": 337500
},
{
"epoch": 0.47,
"learning_rate": 2.6338154376041336e-05,
"loss": 3.7509,
"step": 338000
},
{
"epoch": 0.47,
"learning_rate": 2.6303151645828377e-05,
"loss": 3.7632,
"step": 338500
},
{
"epoch": 0.47,
"learning_rate": 2.626814891561542e-05,
"loss": 3.7577,
"step": 339000
},
{
"epoch": 0.48,
"learning_rate": 2.623314618540246e-05,
"loss": 3.7611,
"step": 339500
},
{
"epoch": 0.48,
"learning_rate": 2.6198143455189506e-05,
"loss": 3.756,
"step": 340000
},
{
"epoch": 0.48,
"eval_loss": 3.726016044616699,
"eval_runtime": 181.8756,
"eval_samples_per_second": 221.64,
"eval_steps_per_second": 18.474,
"step": 340000
},
{
"epoch": 0.48,
"learning_rate": 2.616314072497655e-05,
"loss": 3.7507,
"step": 340500
},
{
"epoch": 0.48,
"learning_rate": 2.612813799476359e-05,
"loss": 3.7625,
"step": 341000
},
{
"epoch": 0.48,
"learning_rate": 2.6093135264550638e-05,
"loss": 3.7643,
"step": 341500
},
{
"epoch": 0.48,
"learning_rate": 2.605813253433768e-05,
"loss": 3.7579,
"step": 342000
},
{
"epoch": 0.48,
"learning_rate": 2.6023129804124726e-05,
"loss": 3.7612,
"step": 342500
},
{
"epoch": 0.48,
"learning_rate": 2.5988127073911766e-05,
"loss": 3.7584,
"step": 343000
},
{
"epoch": 0.48,
"learning_rate": 2.595312434369881e-05,
"loss": 3.7618,
"step": 343500
},
{
"epoch": 0.48,
"learning_rate": 2.591812161348585e-05,
"loss": 3.7543,
"step": 344000
},
{
"epoch": 0.48,
"learning_rate": 2.5883118883272895e-05,
"loss": 3.7583,
"step": 344500
},
{
"epoch": 0.48,
"learning_rate": 2.5848116153059942e-05,
"loss": 3.7629,
"step": 345000
},
{
"epoch": 0.48,
"learning_rate": 2.5813113422846986e-05,
"loss": 3.7512,
"step": 345500
},
{
"epoch": 0.48,
"learning_rate": 2.5778110692634027e-05,
"loss": 3.7585,
"step": 346000
},
{
"epoch": 0.49,
"learning_rate": 2.574310796242107e-05,
"loss": 3.7516,
"step": 346500
},
{
"epoch": 0.49,
"learning_rate": 2.570810523220811e-05,
"loss": 3.7633,
"step": 347000
},
{
"epoch": 0.49,
"learning_rate": 2.5673102501995156e-05,
"loss": 3.7587,
"step": 347500
},
{
"epoch": 0.49,
"learning_rate": 2.56380997717822e-05,
"loss": 3.7596,
"step": 348000
},
{
"epoch": 0.49,
"learning_rate": 2.5603097041569247e-05,
"loss": 3.7476,
"step": 348500
},
{
"epoch": 0.49,
"learning_rate": 2.5568094311356288e-05,
"loss": 3.7602,
"step": 349000
},
{
"epoch": 0.49,
"learning_rate": 2.5533091581143332e-05,
"loss": 3.7528,
"step": 349500
},
{
"epoch": 0.49,
"learning_rate": 2.5498088850930372e-05,
"loss": 3.7628,
"step": 350000
},
{
"epoch": 0.49,
"learning_rate": 2.5463086120717416e-05,
"loss": 3.7567,
"step": 350500
},
{
"epoch": 0.49,
"learning_rate": 2.542808339050446e-05,
"loss": 3.752,
"step": 351000
},
{
"epoch": 0.49,
"learning_rate": 2.53930806602915e-05,
"loss": 3.7625,
"step": 351500
},
{
"epoch": 0.49,
"learning_rate": 2.535807793007855e-05,
"loss": 3.7612,
"step": 352000
},
{
"epoch": 0.49,
"learning_rate": 2.5323075199865592e-05,
"loss": 3.7537,
"step": 352500
},
{
"epoch": 0.49,
"learning_rate": 2.5288072469652637e-05,
"loss": 3.752,
"step": 353000
},
{
"epoch": 0.49,
"learning_rate": 2.5253069739439677e-05,
"loss": 3.7538,
"step": 353500
},
{
"epoch": 0.5,
"learning_rate": 2.521806700922672e-05,
"loss": 3.7523,
"step": 354000
},
{
"epoch": 0.5,
"learning_rate": 2.5183064279013762e-05,
"loss": 3.7611,
"step": 354500
},
{
"epoch": 0.5,
"learning_rate": 2.5148061548800806e-05,
"loss": 3.7549,
"step": 355000
},
{
"epoch": 0.5,
"learning_rate": 2.5113058818587853e-05,
"loss": 3.755,
"step": 355500
},
{
"epoch": 0.5,
"learning_rate": 2.5078056088374897e-05,
"loss": 3.7583,
"step": 356000
},
{
"epoch": 0.5,
"learning_rate": 2.5043053358161938e-05,
"loss": 3.7603,
"step": 356500
},
{
"epoch": 0.5,
"learning_rate": 2.5008050627948982e-05,
"loss": 3.7573,
"step": 357000
},
{
"epoch": 0.5,
"learning_rate": 2.4973047897736023e-05,
"loss": 3.7613,
"step": 357500
},
{
"epoch": 0.5,
"learning_rate": 2.493804516752307e-05,
"loss": 3.7545,
"step": 358000
},
{
"epoch": 0.5,
"learning_rate": 2.490304243731011e-05,
"loss": 3.7553,
"step": 358500
},
{
"epoch": 0.5,
"learning_rate": 2.4868039707097155e-05,
"loss": 3.7642,
"step": 359000
},
{
"epoch": 0.5,
"learning_rate": 2.48330369768842e-05,
"loss": 3.7519,
"step": 359500
},
{
"epoch": 0.5,
"learning_rate": 2.4798034246671243e-05,
"loss": 3.7542,
"step": 360000
},
{
"epoch": 0.5,
"eval_loss": 3.7219858169555664,
"eval_runtime": 182.7192,
"eval_samples_per_second": 220.617,
"eval_steps_per_second": 18.389,
"step": 360000
},
{
"epoch": 0.5,
"learning_rate": 2.4763031516458283e-05,
"loss": 3.7509,
"step": 360500
},
{
"epoch": 0.51,
"learning_rate": 2.4728028786245327e-05,
"loss": 3.7522,
"step": 361000
},
{
"epoch": 0.51,
"learning_rate": 2.469302605603237e-05,
"loss": 3.752,
"step": 361500
},
{
"epoch": 0.51,
"learning_rate": 2.4658023325819415e-05,
"loss": 3.7508,
"step": 362000
},
{
"epoch": 0.51,
"learning_rate": 2.462302059560646e-05,
"loss": 3.7615,
"step": 362500
},
{
"epoch": 0.51,
"learning_rate": 2.45880178653935e-05,
"loss": 3.7523,
"step": 363000
},
{
"epoch": 0.51,
"learning_rate": 2.4553015135180547e-05,
"loss": 3.7621,
"step": 363500
},
{
"epoch": 0.51,
"learning_rate": 2.4518012404967588e-05,
"loss": 3.7539,
"step": 364000
},
{
"epoch": 0.51,
"learning_rate": 2.4483009674754632e-05,
"loss": 3.748,
"step": 364500
},
{
"epoch": 0.51,
"learning_rate": 2.4448006944541676e-05,
"loss": 3.7476,
"step": 365000
},
{
"epoch": 0.51,
"learning_rate": 2.441300421432872e-05,
"loss": 3.7537,
"step": 365500
},
{
"epoch": 0.51,
"learning_rate": 2.437800148411576e-05,
"loss": 3.7447,
"step": 366000
},
{
"epoch": 0.51,
"learning_rate": 2.4342998753902805e-05,
"loss": 3.7535,
"step": 366500
},
{
"epoch": 0.51,
"learning_rate": 2.430799602368985e-05,
"loss": 3.7529,
"step": 367000
},
{
"epoch": 0.51,
"learning_rate": 2.4272993293476893e-05,
"loss": 3.7549,
"step": 367500
},
{
"epoch": 0.52,
"learning_rate": 2.4237990563263933e-05,
"loss": 3.7562,
"step": 368000
},
{
"epoch": 0.52,
"learning_rate": 2.420298783305098e-05,
"loss": 3.7555,
"step": 368500
},
{
"epoch": 0.52,
"learning_rate": 2.416798510283802e-05,
"loss": 3.7578,
"step": 369000
},
{
"epoch": 0.52,
"learning_rate": 2.4132982372625065e-05,
"loss": 3.7545,
"step": 369500
},
{
"epoch": 0.52,
"learning_rate": 2.409797964241211e-05,
"loss": 3.7449,
"step": 370000
},
{
"epoch": 0.52,
"learning_rate": 2.4062976912199153e-05,
"loss": 3.7506,
"step": 370500
},
{
"epoch": 0.52,
"learning_rate": 2.4027974181986197e-05,
"loss": 3.7478,
"step": 371000
},
{
"epoch": 0.52,
"learning_rate": 2.3992971451773238e-05,
"loss": 3.768,
"step": 371500
},
{
"epoch": 0.52,
"learning_rate": 2.3957968721560282e-05,
"loss": 3.7546,
"step": 372000
},
{
"epoch": 0.52,
"learning_rate": 2.3922965991347326e-05,
"loss": 3.7459,
"step": 372500
},
{
"epoch": 0.52,
"learning_rate": 2.388796326113437e-05,
"loss": 3.7546,
"step": 373000
},
{
"epoch": 0.52,
"learning_rate": 2.3852960530921414e-05,
"loss": 3.7544,
"step": 373500
},
{
"epoch": 0.52,
"learning_rate": 2.3817957800708458e-05,
"loss": 3.7653,
"step": 374000
},
{
"epoch": 0.52,
"learning_rate": 2.37829550704955e-05,
"loss": 3.7497,
"step": 374500
},
{
"epoch": 0.53,
"learning_rate": 2.3747952340282543e-05,
"loss": 3.7493,
"step": 375000
},
{
"epoch": 0.53,
"learning_rate": 2.3712949610069587e-05,
"loss": 3.7599,
"step": 375500
},
{
"epoch": 0.53,
"learning_rate": 2.367794687985663e-05,
"loss": 3.7386,
"step": 376000
},
{
"epoch": 0.53,
"learning_rate": 2.364294414964367e-05,
"loss": 3.7473,
"step": 376500
},
{
"epoch": 0.53,
"learning_rate": 2.360794141943072e-05,
"loss": 3.7525,
"step": 377000
},
{
"epoch": 0.53,
"learning_rate": 2.357293868921776e-05,
"loss": 3.746,
"step": 377500
},
{
"epoch": 0.53,
"learning_rate": 2.3537935959004804e-05,
"loss": 3.749,
"step": 378000
},
{
"epoch": 0.53,
"learning_rate": 2.3502933228791844e-05,
"loss": 3.7453,
"step": 378500
},
{
"epoch": 0.53,
"learning_rate": 2.346793049857889e-05,
"loss": 3.748,
"step": 379000
},
{
"epoch": 0.53,
"learning_rate": 2.3432927768365932e-05,
"loss": 3.7499,
"step": 379500
},
{
"epoch": 0.53,
"learning_rate": 2.3397925038152976e-05,
"loss": 3.7419,
"step": 380000
},
{
"epoch": 0.53,
"eval_loss": 3.7187860012054443,
"eval_runtime": 181.9498,
"eval_samples_per_second": 221.55,
"eval_steps_per_second": 18.467,
"step": 380000
},
{
"epoch": 0.53,
"learning_rate": 2.336292230794002e-05,
"loss": 3.7527,
"step": 380500
},
{
"epoch": 0.53,
"learning_rate": 2.3327919577727064e-05,
"loss": 3.7496,
"step": 381000
},
{
"epoch": 0.53,
"learning_rate": 2.329291684751411e-05,
"loss": 3.7527,
"step": 381500
},
{
"epoch": 0.53,
"learning_rate": 2.325791411730115e-05,
"loss": 3.7549,
"step": 382000
},
{
"epoch": 0.54,
"learning_rate": 2.3222911387088196e-05,
"loss": 3.7553,
"step": 382500
},
{
"epoch": 0.54,
"learning_rate": 2.3187908656875237e-05,
"loss": 3.7574,
"step": 383000
},
{
"epoch": 0.54,
"learning_rate": 2.315290592666228e-05,
"loss": 3.7434,
"step": 383500
},
{
"epoch": 0.54,
"learning_rate": 2.3117903196449325e-05,
"loss": 3.7522,
"step": 384000
},
{
"epoch": 0.54,
"learning_rate": 2.308290046623637e-05,
"loss": 3.7546,
"step": 384500
},
{
"epoch": 0.54,
"learning_rate": 2.304789773602341e-05,
"loss": 3.7397,
"step": 385000
},
{
"epoch": 0.54,
"learning_rate": 2.3012895005810454e-05,
"loss": 3.7529,
"step": 385500
},
{
"epoch": 0.54,
"learning_rate": 2.2977892275597498e-05,
"loss": 3.7553,
"step": 386000
},
{
"epoch": 0.54,
"learning_rate": 2.2942889545384542e-05,
"loss": 3.7416,
"step": 386500
},
{
"epoch": 0.54,
"learning_rate": 2.2907886815171582e-05,
"loss": 3.7501,
"step": 387000
},
{
"epoch": 0.54,
"learning_rate": 2.287288408495863e-05,
"loss": 3.7453,
"step": 387500
},
{
"epoch": 0.54,
"learning_rate": 2.283788135474567e-05,
"loss": 3.7434,
"step": 388000
},
{
"epoch": 0.54,
"learning_rate": 2.2802878624532714e-05,
"loss": 3.7452,
"step": 388500
},
{
"epoch": 0.54,
"learning_rate": 2.2767875894319755e-05,
"loss": 3.7458,
"step": 389000
},
{
"epoch": 0.55,
"learning_rate": 2.2732873164106803e-05,
"loss": 3.7433,
"step": 389500
},
{
"epoch": 0.55,
"learning_rate": 2.2697870433893843e-05,
"loss": 3.7453,
"step": 390000
},
{
"epoch": 0.55,
"learning_rate": 2.2662867703680887e-05,
"loss": 3.7396,
"step": 390500
},
{
"epoch": 0.55,
"learning_rate": 2.262786497346793e-05,
"loss": 3.7448,
"step": 391000
},
{
"epoch": 0.55,
"learning_rate": 2.2592862243254975e-05,
"loss": 3.7571,
"step": 391500
},
{
"epoch": 0.55,
"learning_rate": 2.255785951304202e-05,
"loss": 3.7468,
"step": 392000
},
{
"epoch": 0.55,
"learning_rate": 2.252285678282906e-05,
"loss": 3.7502,
"step": 392500
},
{
"epoch": 0.55,
"learning_rate": 2.2487854052616107e-05,
"loss": 3.7453,
"step": 393000
},
{
"epoch": 0.55,
"learning_rate": 2.2452851322403148e-05,
"loss": 3.7451,
"step": 393500
},
{
"epoch": 0.55,
"learning_rate": 2.2417848592190192e-05,
"loss": 3.7421,
"step": 394000
},
{
"epoch": 0.55,
"learning_rate": 2.2382845861977236e-05,
"loss": 3.7514,
"step": 394500
},
{
"epoch": 0.55,
"learning_rate": 2.234784313176428e-05,
"loss": 3.74,
"step": 395000
},
{
"epoch": 0.55,
"learning_rate": 2.231284040155132e-05,
"loss": 3.7482,
"step": 395500
},
{
"epoch": 0.55,
"learning_rate": 2.2277837671338365e-05,
"loss": 3.7464,
"step": 396000
},
{
"epoch": 0.56,
"learning_rate": 2.224283494112541e-05,
"loss": 3.7497,
"step": 396500
},
{
"epoch": 0.56,
"learning_rate": 2.2207832210912453e-05,
"loss": 3.7529,
"step": 397000
},
{
"epoch": 0.56,
"learning_rate": 2.2172829480699493e-05,
"loss": 3.7476,
"step": 397500
},
{
"epoch": 0.56,
"learning_rate": 2.213782675048654e-05,
"loss": 3.7402,
"step": 398000
},
{
"epoch": 0.56,
"learning_rate": 2.210282402027358e-05,
"loss": 3.7454,
"step": 398500
},
{
"epoch": 0.56,
"learning_rate": 2.2067821290060625e-05,
"loss": 3.7488,
"step": 399000
},
{
"epoch": 0.56,
"learning_rate": 2.203281855984767e-05,
"loss": 3.7411,
"step": 399500
},
{
"epoch": 0.56,
"learning_rate": 2.1997815829634713e-05,
"loss": 3.7399,
"step": 400000
},
{
"epoch": 0.56,
"eval_loss": 3.717241048812866,
"eval_runtime": 181.8436,
"eval_samples_per_second": 221.68,
"eval_steps_per_second": 18.477,
"step": 400000
},
{
"epoch": 0.56,
"learning_rate": 2.1962813099421754e-05,
"loss": 3.7474,
"step": 400500
},
{
"epoch": 0.56,
"learning_rate": 2.1927810369208798e-05,
"loss": 3.7533,
"step": 401000
},
{
"epoch": 0.56,
"learning_rate": 2.1892807638995842e-05,
"loss": 3.748,
"step": 401500
},
{
"epoch": 0.56,
"learning_rate": 2.1857804908782886e-05,
"loss": 3.7423,
"step": 402000
},
{
"epoch": 0.56,
"learning_rate": 2.182280217856993e-05,
"loss": 3.7431,
"step": 402500
},
{
"epoch": 0.56,
"learning_rate": 2.1787799448356974e-05,
"loss": 3.7401,
"step": 403000
},
{
"epoch": 0.56,
"learning_rate": 2.1752796718144018e-05,
"loss": 3.7406,
"step": 403500
},
{
"epoch": 0.57,
"learning_rate": 2.171779398793106e-05,
"loss": 3.737,
"step": 404000
},
{
"epoch": 0.57,
"learning_rate": 2.1682791257718103e-05,
"loss": 3.7431,
"step": 404500
},
{
"epoch": 0.57,
"learning_rate": 2.1647788527505147e-05,
"loss": 3.7465,
"step": 405000
},
{
"epoch": 0.57,
"learning_rate": 2.161278579729219e-05,
"loss": 3.7409,
"step": 405500
},
{
"epoch": 0.57,
"learning_rate": 2.157778306707923e-05,
"loss": 3.7432,
"step": 406000
},
{
"epoch": 0.57,
"learning_rate": 2.154278033686628e-05,
"loss": 3.7417,
"step": 406500
},
{
"epoch": 0.57,
"learning_rate": 2.150777760665332e-05,
"loss": 3.7428,
"step": 407000
},
{
"epoch": 0.57,
"learning_rate": 2.1472774876440364e-05,
"loss": 3.7469,
"step": 407500
},
{
"epoch": 0.57,
"learning_rate": 2.1437772146227404e-05,
"loss": 3.7494,
"step": 408000
},
{
"epoch": 0.57,
"learning_rate": 2.140276941601445e-05,
"loss": 3.7407,
"step": 408500
},
{
"epoch": 0.57,
"learning_rate": 2.1367766685801492e-05,
"loss": 3.7444,
"step": 409000
},
{
"epoch": 0.57,
"learning_rate": 2.1332763955588536e-05,
"loss": 3.7424,
"step": 409500
},
{
"epoch": 0.57,
"learning_rate": 2.129776122537558e-05,
"loss": 3.7485,
"step": 410000
},
{
"epoch": 0.57,
"learning_rate": 2.1262758495162624e-05,
"loss": 3.7536,
"step": 410500
},
{
"epoch": 0.58,
"learning_rate": 2.1227755764949668e-05,
"loss": 3.7489,
"step": 411000
},
{
"epoch": 0.58,
"learning_rate": 2.119275303473671e-05,
"loss": 3.7445,
"step": 411500
},
{
"epoch": 0.58,
"learning_rate": 2.1157750304523756e-05,
"loss": 3.7523,
"step": 412000
},
{
"epoch": 0.58,
"learning_rate": 2.1122747574310797e-05,
"loss": 3.7373,
"step": 412500
},
{
"epoch": 0.58,
"learning_rate": 2.108774484409784e-05,
"loss": 3.7382,
"step": 413000
},
{
"epoch": 0.58,
"learning_rate": 2.1052742113884885e-05,
"loss": 3.7469,
"step": 413500
},
{
"epoch": 0.58,
"learning_rate": 2.101773938367193e-05,
"loss": 3.7419,
"step": 414000
},
{
"epoch": 0.58,
"learning_rate": 2.098273665345897e-05,
"loss": 3.7426,
"step": 414500
},
{
"epoch": 0.58,
"learning_rate": 2.0947733923246014e-05,
"loss": 3.7556,
"step": 415000
},
{
"epoch": 0.58,
"learning_rate": 2.0912731193033058e-05,
"loss": 3.7477,
"step": 415500
},
{
"epoch": 0.58,
"learning_rate": 2.08777284628201e-05,
"loss": 3.7499,
"step": 416000
},
{
"epoch": 0.58,
"learning_rate": 2.0842725732607142e-05,
"loss": 3.7435,
"step": 416500
},
{
"epoch": 0.58,
"learning_rate": 2.080772300239419e-05,
"loss": 3.7463,
"step": 417000
},
{
"epoch": 0.58,
"learning_rate": 2.077272027218123e-05,
"loss": 3.7433,
"step": 417500
},
{
"epoch": 0.59,
"learning_rate": 2.0737717541968274e-05,
"loss": 3.7459,
"step": 418000
},
{
"epoch": 0.59,
"learning_rate": 2.0702714811755315e-05,
"loss": 3.7456,
"step": 418500
},
{
"epoch": 0.59,
"learning_rate": 2.0667712081542362e-05,
"loss": 3.7444,
"step": 419000
},
{
"epoch": 0.59,
"learning_rate": 2.0632709351329403e-05,
"loss": 3.7451,
"step": 419500
},
{
"epoch": 0.59,
"learning_rate": 2.0597706621116447e-05,
"loss": 3.7429,
"step": 420000
},
{
"epoch": 0.59,
"eval_loss": 3.712952136993408,
"eval_runtime": 181.1199,
"eval_samples_per_second": 222.565,
"eval_steps_per_second": 18.551,
"step": 420000
},
{
"epoch": 0.59,
"learning_rate": 2.056270389090349e-05,
"loss": 3.7396,
"step": 420500
},
{
"epoch": 0.59,
"learning_rate": 2.0527701160690535e-05,
"loss": 3.7356,
"step": 421000
},
{
"epoch": 0.59,
"learning_rate": 2.049269843047758e-05,
"loss": 3.7405,
"step": 421500
},
{
"epoch": 0.59,
"learning_rate": 2.045769570026462e-05,
"loss": 3.7386,
"step": 422000
},
{
"epoch": 0.59,
"learning_rate": 2.0422692970051667e-05,
"loss": 3.7367,
"step": 422500
},
{
"epoch": 0.59,
"learning_rate": 2.0387690239838708e-05,
"loss": 3.7373,
"step": 423000
},
{
"epoch": 0.59,
"learning_rate": 2.0352687509625752e-05,
"loss": 3.7291,
"step": 423500
},
{
"epoch": 0.59,
"learning_rate": 2.0317684779412796e-05,
"loss": 3.7395,
"step": 424000
},
{
"epoch": 0.59,
"learning_rate": 2.028268204919984e-05,
"loss": 3.742,
"step": 424500
},
{
"epoch": 0.6,
"learning_rate": 2.024767931898688e-05,
"loss": 3.7429,
"step": 425000
},
{
"epoch": 0.6,
"learning_rate": 2.0212676588773928e-05,
"loss": 3.7487,
"step": 425500
},
{
"epoch": 0.6,
"learning_rate": 2.017767385856097e-05,
"loss": 3.7371,
"step": 426000
},
{
"epoch": 0.6,
"learning_rate": 2.0142671128348013e-05,
"loss": 3.7371,
"step": 426500
},
{
"epoch": 0.6,
"learning_rate": 2.0107668398135053e-05,
"loss": 3.7434,
"step": 427000
},
{
"epoch": 0.6,
"learning_rate": 2.00726656679221e-05,
"loss": 3.7373,
"step": 427500
},
{
"epoch": 0.6,
"learning_rate": 2.003766293770914e-05,
"loss": 3.7445,
"step": 428000
},
{
"epoch": 0.6,
"learning_rate": 2.0002660207496185e-05,
"loss": 3.743,
"step": 428500
},
{
"epoch": 0.6,
"learning_rate": 1.996765747728323e-05,
"loss": 3.7444,
"step": 429000
},
{
"epoch": 0.6,
"learning_rate": 1.9932654747070273e-05,
"loss": 3.7371,
"step": 429500
},
{
"epoch": 0.6,
"learning_rate": 1.9897652016857314e-05,
"loss": 3.7395,
"step": 430000
},
{
"epoch": 0.6,
"learning_rate": 1.9862649286644358e-05,
"loss": 3.7359,
"step": 430500
},
{
"epoch": 0.6,
"learning_rate": 1.9827646556431402e-05,
"loss": 3.739,
"step": 431000
},
{
"epoch": 0.6,
"learning_rate": 1.9792643826218446e-05,
"loss": 3.7381,
"step": 431500
},
{
"epoch": 0.6,
"learning_rate": 1.975764109600549e-05,
"loss": 3.7349,
"step": 432000
},
{
"epoch": 0.61,
"learning_rate": 1.9722638365792534e-05,
"loss": 3.7415,
"step": 432500
},
{
"epoch": 0.61,
"learning_rate": 1.9687635635579578e-05,
"loss": 3.7389,
"step": 433000
},
{
"epoch": 0.61,
"learning_rate": 1.965263290536662e-05,
"loss": 3.7386,
"step": 433500
},
{
"epoch": 0.61,
"learning_rate": 1.9617630175153663e-05,
"loss": 3.7391,
"step": 434000
},
{
"epoch": 0.61,
"learning_rate": 1.9582627444940707e-05,
"loss": 3.7392,
"step": 434500
},
{
"epoch": 0.61,
"learning_rate": 1.954762471472775e-05,
"loss": 3.7452,
"step": 435000
},
{
"epoch": 0.61,
"learning_rate": 1.951262198451479e-05,
"loss": 3.7395,
"step": 435500
},
{
"epoch": 0.61,
"learning_rate": 1.947761925430184e-05,
"loss": 3.7383,
"step": 436000
},
{
"epoch": 0.61,
"learning_rate": 1.944261652408888e-05,
"loss": 3.7408,
"step": 436500
},
{
"epoch": 0.61,
"learning_rate": 1.9407613793875923e-05,
"loss": 3.744,
"step": 437000
},
{
"epoch": 0.61,
"learning_rate": 1.9372611063662964e-05,
"loss": 3.7293,
"step": 437500
},
{
"epoch": 0.61,
"learning_rate": 1.933760833345001e-05,
"loss": 3.7363,
"step": 438000
},
{
"epoch": 0.61,
"learning_rate": 1.9302605603237052e-05,
"loss": 3.7397,
"step": 438500
},
{
"epoch": 0.61,
"learning_rate": 1.9267602873024096e-05,
"loss": 3.7405,
"step": 439000
},
{
"epoch": 0.62,
"learning_rate": 1.923260014281114e-05,
"loss": 3.7347,
"step": 439500
},
{
"epoch": 0.62,
"learning_rate": 1.9197597412598184e-05,
"loss": 3.7401,
"step": 440000
},
{
"epoch": 0.62,
"eval_loss": 3.7116401195526123,
"eval_runtime": 183.8252,
"eval_samples_per_second": 219.29,
"eval_steps_per_second": 18.278,
"step": 440000
},
{
"epoch": 0.62,
"learning_rate": 1.9162594682385228e-05,
"loss": 3.7493,
"step": 440500
},
{
"epoch": 0.62,
"learning_rate": 1.912759195217227e-05,
"loss": 3.7293,
"step": 441000
},
{
"epoch": 0.62,
"learning_rate": 1.9092589221959313e-05,
"loss": 3.7391,
"step": 441500
},
{
"epoch": 0.62,
"learning_rate": 1.9057586491746357e-05,
"loss": 3.7398,
"step": 442000
},
{
"epoch": 0.62,
"learning_rate": 1.90225837615334e-05,
"loss": 3.7521,
"step": 442500
},
{
"epoch": 0.62,
"learning_rate": 1.8987581031320445e-05,
"loss": 3.7385,
"step": 443000
},
{
"epoch": 0.62,
"learning_rate": 1.895257830110749e-05,
"loss": 3.7268,
"step": 443500
},
{
"epoch": 0.62,
"learning_rate": 1.891757557089453e-05,
"loss": 3.7425,
"step": 444000
},
{
"epoch": 0.62,
"learning_rate": 1.8882572840681574e-05,
"loss": 3.7316,
"step": 444500
},
{
"epoch": 0.62,
"learning_rate": 1.8847570110468618e-05,
"loss": 3.7422,
"step": 445000
},
{
"epoch": 0.62,
"learning_rate": 1.881256738025566e-05,
"loss": 3.7376,
"step": 445500
},
{
"epoch": 0.62,
"learning_rate": 1.8777564650042702e-05,
"loss": 3.7397,
"step": 446000
},
{
"epoch": 0.63,
"learning_rate": 1.874256191982975e-05,
"loss": 3.734,
"step": 446500
},
{
"epoch": 0.63,
"learning_rate": 1.870755918961679e-05,
"loss": 3.7346,
"step": 447000
},
{
"epoch": 0.63,
"learning_rate": 1.8672556459403834e-05,
"loss": 3.7412,
"step": 447500
},
{
"epoch": 0.63,
"learning_rate": 1.8637553729190878e-05,
"loss": 3.7293,
"step": 448000
},
{
"epoch": 0.63,
"learning_rate": 1.8602550998977922e-05,
"loss": 3.7343,
"step": 448500
},
{
"epoch": 0.63,
"learning_rate": 1.8567548268764963e-05,
"loss": 3.7343,
"step": 449000
},
{
"epoch": 0.63,
"learning_rate": 1.8532545538552007e-05,
"loss": 3.7366,
"step": 449500
},
{
"epoch": 0.63,
"learning_rate": 1.849754280833905e-05,
"loss": 3.7296,
"step": 450000
},
{
"epoch": 0.63,
"learning_rate": 1.8462540078126095e-05,
"loss": 3.7306,
"step": 450500
},
{
"epoch": 0.63,
"learning_rate": 1.842753734791314e-05,
"loss": 3.7356,
"step": 451000
},
{
"epoch": 0.63,
"learning_rate": 1.8392534617700183e-05,
"loss": 3.7319,
"step": 451500
},
{
"epoch": 0.63,
"learning_rate": 1.8357531887487227e-05,
"loss": 3.7393,
"step": 452000
},
{
"epoch": 0.63,
"learning_rate": 1.8322529157274268e-05,
"loss": 3.7416,
"step": 452500
},
{
"epoch": 0.63,
"learning_rate": 1.8287526427061312e-05,
"loss": 3.7397,
"step": 453000
},
{
"epoch": 0.63,
"learning_rate": 1.8252523696848356e-05,
"loss": 3.7347,
"step": 453500
},
{
"epoch": 0.64,
"learning_rate": 1.82175209666354e-05,
"loss": 3.7358,
"step": 454000
},
{
"epoch": 0.64,
"learning_rate": 1.818251823642244e-05,
"loss": 3.7388,
"step": 454500
},
{
"epoch": 0.64,
"learning_rate": 1.8147515506209488e-05,
"loss": 3.7361,
"step": 455000
},
{
"epoch": 0.64,
"learning_rate": 1.811251277599653e-05,
"loss": 3.7319,
"step": 455500
},
{
"epoch": 0.64,
"learning_rate": 1.8077510045783572e-05,
"loss": 3.7374,
"step": 456000
},
{
"epoch": 0.64,
"learning_rate": 1.8042507315570613e-05,
"loss": 3.734,
"step": 456500
},
{
"epoch": 0.64,
"learning_rate": 1.800750458535766e-05,
"loss": 3.7324,
"step": 457000
},
{
"epoch": 0.64,
"learning_rate": 1.79725018551447e-05,
"loss": 3.7339,
"step": 457500
},
{
"epoch": 0.64,
"learning_rate": 1.7937499124931745e-05,
"loss": 3.729,
"step": 458000
},
{
"epoch": 0.64,
"learning_rate": 1.790249639471879e-05,
"loss": 3.7382,
"step": 458500
},
{
"epoch": 0.64,
"learning_rate": 1.7867493664505833e-05,
"loss": 3.7432,
"step": 459000
},
{
"epoch": 0.64,
"learning_rate": 1.7832490934292874e-05,
"loss": 3.74,
"step": 459500
},
{
"epoch": 0.64,
"learning_rate": 1.7797488204079918e-05,
"loss": 3.7403,
"step": 460000
},
{
"epoch": 0.64,
"eval_loss": 3.7084054946899414,
"eval_runtime": 183.9432,
"eval_samples_per_second": 219.149,
"eval_steps_per_second": 18.267,
"step": 460000
},
{
"epoch": 0.64,
"learning_rate": 1.7762485473866962e-05,
"loss": 3.7317,
"step": 460500
},
{
"epoch": 0.65,
"learning_rate": 1.7727482743654006e-05,
"loss": 3.7421,
"step": 461000
},
{
"epoch": 0.65,
"learning_rate": 1.769248001344105e-05,
"loss": 3.7392,
"step": 461500
},
{
"epoch": 0.65,
"learning_rate": 1.7657477283228094e-05,
"loss": 3.744,
"step": 462000
},
{
"epoch": 0.65,
"learning_rate": 1.7622474553015138e-05,
"loss": 3.7371,
"step": 462500
},
{
"epoch": 0.65,
"learning_rate": 1.758747182280218e-05,
"loss": 3.7364,
"step": 463000
},
{
"epoch": 0.65,
"learning_rate": 1.7552469092589223e-05,
"loss": 3.7375,
"step": 463500
},
{
"epoch": 0.65,
"learning_rate": 1.7517466362376267e-05,
"loss": 3.7258,
"step": 464000
},
{
"epoch": 0.65,
"learning_rate": 1.748246363216331e-05,
"loss": 3.7359,
"step": 464500
},
{
"epoch": 0.65,
"learning_rate": 1.744746090195035e-05,
"loss": 3.7302,
"step": 465000
},
{
"epoch": 0.65,
"learning_rate": 1.74124581717374e-05,
"loss": 3.7364,
"step": 465500
},
{
"epoch": 0.65,
"learning_rate": 1.737745544152444e-05,
"loss": 3.7274,
"step": 466000
},
{
"epoch": 0.65,
"learning_rate": 1.7342452711311483e-05,
"loss": 3.7376,
"step": 466500
},
{
"epoch": 0.65,
"learning_rate": 1.7307449981098524e-05,
"loss": 3.7258,
"step": 467000
},
{
"epoch": 0.65,
"learning_rate": 1.727244725088557e-05,
"loss": 3.7251,
"step": 467500
},
{
"epoch": 0.66,
"learning_rate": 1.7237444520672612e-05,
"loss": 3.7381,
"step": 468000
},
{
"epoch": 0.66,
"learning_rate": 1.7202441790459656e-05,
"loss": 3.7401,
"step": 468500
},
{
"epoch": 0.66,
"learning_rate": 1.71674390602467e-05,
"loss": 3.7283,
"step": 469000
},
{
"epoch": 0.66,
"learning_rate": 1.7132436330033744e-05,
"loss": 3.7294,
"step": 469500
},
{
"epoch": 0.66,
"learning_rate": 1.7097433599820785e-05,
"loss": 3.7295,
"step": 470000
},
{
"epoch": 0.66,
"learning_rate": 1.706243086960783e-05,
"loss": 3.7399,
"step": 470500
},
{
"epoch": 0.66,
"learning_rate": 1.7027428139394873e-05,
"loss": 3.7282,
"step": 471000
},
{
"epoch": 0.66,
"learning_rate": 1.6992425409181917e-05,
"loss": 3.7327,
"step": 471500
},
{
"epoch": 0.66,
"learning_rate": 1.695742267896896e-05,
"loss": 3.7314,
"step": 472000
},
{
"epoch": 0.66,
"learning_rate": 1.6922419948756005e-05,
"loss": 3.7386,
"step": 472500
},
{
"epoch": 0.66,
"learning_rate": 1.688741721854305e-05,
"loss": 3.7334,
"step": 473000
},
{
"epoch": 0.66,
"learning_rate": 1.685241448833009e-05,
"loss": 3.7342,
"step": 473500
},
{
"epoch": 0.66,
"learning_rate": 1.6817411758117137e-05,
"loss": 3.7295,
"step": 474000
},
{
"epoch": 0.66,
"learning_rate": 1.6782409027904177e-05,
"loss": 3.7318,
"step": 474500
},
{
"epoch": 0.67,
"learning_rate": 1.674740629769122e-05,
"loss": 3.7325,
"step": 475000
},
{
"epoch": 0.67,
"learning_rate": 1.6712403567478262e-05,
"loss": 3.73,
"step": 475500
},
{
"epoch": 0.67,
"learning_rate": 1.667740083726531e-05,
"loss": 3.7395,
"step": 476000
},
{
"epoch": 0.67,
"learning_rate": 1.664239810705235e-05,
"loss": 3.7365,
"step": 476500
},
{
"epoch": 0.67,
"learning_rate": 1.6607395376839394e-05,
"loss": 3.7382,
"step": 477000
},
{
"epoch": 0.67,
"learning_rate": 1.6572392646626438e-05,
"loss": 3.7276,
"step": 477500
},
{
"epoch": 0.67,
"learning_rate": 1.6537389916413482e-05,
"loss": 3.7355,
"step": 478000
},
{
"epoch": 0.67,
"learning_rate": 1.6502387186200523e-05,
"loss": 3.7293,
"step": 478500
},
{
"epoch": 0.67,
"learning_rate": 1.6467384455987567e-05,
"loss": 3.7289,
"step": 479000
},
{
"epoch": 0.67,
"learning_rate": 1.643238172577461e-05,
"loss": 3.734,
"step": 479500
},
{
"epoch": 0.67,
"learning_rate": 1.6397378995561655e-05,
"loss": 3.7287,
"step": 480000
},
{
"epoch": 0.67,
"eval_loss": 3.705913782119751,
"eval_runtime": 182.0311,
"eval_samples_per_second": 221.451,
"eval_steps_per_second": 18.458,
"step": 480000
},
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.63623762653487e-05, |
|
"loss": 3.7323, |
|
"step": 480500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.6327373535135743e-05, |
|
"loss": 3.7322, |
|
"step": 481000 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.6292370804922784e-05, |
|
"loss": 3.7349, |
|
"step": 481500 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 1.6257368074709828e-05, |
|
"loss": 3.7282, |
|
"step": 482000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.622236534449687e-05, |
|
"loss": 3.736, |
|
"step": 482500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6187362614283916e-05, |
|
"loss": 3.7321, |
|
"step": 483000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.615235988407096e-05, |
|
"loss": 3.7241, |
|
"step": 483500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6117357153858e-05, |
|
"loss": 3.7286, |
|
"step": 484000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6082354423645048e-05, |
|
"loss": 3.7298, |
|
"step": 484500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.604735169343209e-05, |
|
"loss": 3.7423, |
|
"step": 485000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.6012348963219132e-05, |
|
"loss": 3.7257, |
|
"step": 485500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5977346233006173e-05, |
|
"loss": 3.7322, |
|
"step": 486000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.594234350279322e-05, |
|
"loss": 3.7337, |
|
"step": 486500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.590734077258026e-05, |
|
"loss": 3.7311, |
|
"step": 487000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5872338042367305e-05, |
|
"loss": 3.7196, |
|
"step": 487500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.583733531215435e-05, |
|
"loss": 3.7361, |
|
"step": 488000 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5802332581941393e-05, |
|
"loss": 3.7351, |
|
"step": 488500 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 1.5767329851728434e-05, |
|
"loss": 3.7274, |
|
"step": 489000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5732327121515478e-05, |
|
"loss": 3.7319, |
|
"step": 489500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5697324391302522e-05, |
|
"loss": 3.7303, |
|
"step": 490000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5662321661089566e-05, |
|
"loss": 3.7309, |
|
"step": 490500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.562731893087661e-05, |
|
"loss": 3.7252, |
|
"step": 491000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5592316200663654e-05, |
|
"loss": 3.7253, |
|
"step": 491500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5557313470450698e-05, |
|
"loss": 3.7317, |
|
"step": 492000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.552231074023774e-05, |
|
"loss": 3.724, |
|
"step": 492500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5487308010024782e-05, |
|
"loss": 3.724, |
|
"step": 493000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5452305279811826e-05, |
|
"loss": 3.7325, |
|
"step": 493500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.541730254959887e-05, |
|
"loss": 3.7266, |
|
"step": 494000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.538229981938591e-05, |
|
"loss": 3.7366, |
|
"step": 494500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.534729708917296e-05, |
|
"loss": 3.7254, |
|
"step": 495000 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.531229435896e-05, |
|
"loss": 3.7388, |
|
"step": 495500 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 1.5277291628747043e-05, |
|
"loss": 3.7303, |
|
"step": 496000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5242288898534086e-05, |
|
"loss": 3.7209, |
|
"step": 496500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5207286168321131e-05, |
|
"loss": 3.7274, |
|
"step": 497000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5172283438108174e-05, |
|
"loss": 3.7283, |
|
"step": 497500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5137280707895216e-05, |
|
"loss": 3.7339, |
|
"step": 498000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5102277977682262e-05, |
|
"loss": 3.7256, |
|
"step": 498500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5067275247469304e-05, |
|
"loss": 3.7307, |
|
"step": 499000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.5032272517256346e-05, |
|
"loss": 3.7307, |
|
"step": 499500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4997269787043392e-05, |
|
"loss": 3.7299, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"eval_loss": 3.7035679817199707, |
|
"eval_runtime": 181.7916, |
|
"eval_samples_per_second": 221.743, |
|
"eval_steps_per_second": 18.483, |
|
"step": 500000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4962267056830434e-05, |
|
"loss": 3.7238, |
|
"step": 500500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4927264326617477e-05, |
|
"loss": 3.7279, |
|
"step": 501000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4892261596404519e-05, |
|
"loss": 3.7296, |
|
"step": 501500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4857258866191565e-05, |
|
"loss": 3.7246, |
|
"step": 502000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4822256135978607e-05, |
|
"loss": 3.7365, |
|
"step": 502500 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.478725340576565e-05, |
|
"loss": 3.7277, |
|
"step": 503000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 1.4752250675552695e-05, |
|
"loss": 3.7256, |
|
"step": 503500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4717247945339737e-05, |
|
"loss": 3.7264, |
|
"step": 504000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.468224521512678e-05, |
|
"loss": 3.7312, |
|
"step": 504500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4647242484913824e-05, |
|
"loss": 3.7298, |
|
"step": 505000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4612239754700868e-05, |
|
"loss": 3.724, |
|
"step": 505500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.457723702448791e-05, |
|
"loss": 3.7269, |
|
"step": 506000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4542234294274954e-05, |
|
"loss": 3.7298, |
|
"step": 506500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4507231564061998e-05, |
|
"loss": 3.7278, |
|
"step": 507000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4472228833849042e-05, |
|
"loss": 3.7269, |
|
"step": 507500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4437226103636084e-05, |
|
"loss": 3.7293, |
|
"step": 508000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4402223373423127e-05, |
|
"loss": 3.7259, |
|
"step": 508500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4367220643210172e-05, |
|
"loss": 3.7285, |
|
"step": 509000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4332217912997215e-05, |
|
"loss": 3.7346, |
|
"step": 509500 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4297215182784257e-05, |
|
"loss": 3.7303, |
|
"step": 510000 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 1.4262212452571303e-05, |
|
"loss": 3.7234, |
|
"step": 510500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4227209722358345e-05, |
|
"loss": 3.7276, |
|
"step": 511000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4192206992145387e-05, |
|
"loss": 3.7298, |
|
"step": 511500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.415720426193243e-05, |
|
"loss": 3.7217, |
|
"step": 512000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4122201531719476e-05, |
|
"loss": 3.7279, |
|
"step": 512500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4087198801506518e-05, |
|
"loss": 3.7317, |
|
"step": 513000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.405219607129356e-05, |
|
"loss": 3.7218, |
|
"step": 513500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.4017193341080606e-05, |
|
"loss": 3.735, |
|
"step": 514000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3982190610867648e-05, |
|
"loss": 3.7239, |
|
"step": 514500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.394718788065469e-05, |
|
"loss": 3.7276, |
|
"step": 515000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3912185150441735e-05, |
|
"loss": 3.7256, |
|
"step": 515500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3877182420228779e-05, |
|
"loss": 3.7258, |
|
"step": 516000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3842179690015823e-05, |
|
"loss": 3.7302, |
|
"step": 516500 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3807176959802865e-05, |
|
"loss": 3.7226, |
|
"step": 517000 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 1.3772174229589909e-05, |
|
"loss": 3.7216, |
|
"step": 517500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3737171499376953e-05, |
|
"loss": 3.7272, |
|
"step": 518000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3702168769163995e-05, |
|
"loss": 3.722, |
|
"step": 518500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3667166038951038e-05, |
|
"loss": 3.727, |
|
"step": 519000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3632163308738083e-05, |
|
"loss": 3.729, |
|
"step": 519500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3597160578525126e-05, |
|
"loss": 3.7242, |
|
"step": 520000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"eval_loss": 3.7008578777313232, |
|
"eval_runtime": 181.438, |
|
"eval_samples_per_second": 222.175, |
|
"eval_steps_per_second": 18.519, |
|
"step": 520000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3562157848312168e-05, |
|
"loss": 3.7181, |
|
"step": 520500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3527155118099214e-05, |
|
"loss": 3.7288, |
|
"step": 521000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3492152387886256e-05, |
|
"loss": 3.7245, |
|
"step": 521500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3457149657673298e-05, |
|
"loss": 3.7284, |
|
"step": 522000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.342214692746034e-05, |
|
"loss": 3.7211, |
|
"step": 522500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3387144197247386e-05, |
|
"loss": 3.7197, |
|
"step": 523000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3352141467034429e-05, |
|
"loss": 3.7312, |
|
"step": 523500 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3317138736821471e-05, |
|
"loss": 3.725, |
|
"step": 524000 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 1.3282136006608517e-05, |
|
"loss": 3.7207, |
|
"step": 524500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3247133276395559e-05, |
|
"loss": 3.7217, |
|
"step": 525000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3212130546182603e-05, |
|
"loss": 3.7261, |
|
"step": 525500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3177127815969647e-05, |
|
"loss": 3.7211, |
|
"step": 526000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.314212508575669e-05, |
|
"loss": 3.7218, |
|
"step": 526500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3107122355543733e-05, |
|
"loss": 3.7274, |
|
"step": 527000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3072119625330776e-05, |
|
"loss": 3.7189, |
|
"step": 527500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3037116895117821e-05, |
|
"loss": 3.7252, |
|
"step": 528000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.3002114164904864e-05, |
|
"loss": 3.7248, |
|
"step": 528500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2967111434691906e-05, |
|
"loss": 3.7292, |
|
"step": 529000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2932108704478952e-05, |
|
"loss": 3.7241, |
|
"step": 529500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2897105974265994e-05, |
|
"loss": 3.726, |
|
"step": 530000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2862103244053037e-05, |
|
"loss": 3.7276, |
|
"step": 530500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2827100513840079e-05, |
|
"loss": 3.7231, |
|
"step": 531000 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2792097783627125e-05, |
|
"loss": 3.7211, |
|
"step": 531500 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 1.2757095053414167e-05, |
|
"loss": 3.7211, |
|
"step": 532000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.272209232320121e-05, |
|
"loss": 3.7283, |
|
"step": 532500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2687089592988255e-05, |
|
"loss": 3.7188, |
|
"step": 533000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2652086862775297e-05, |
|
"loss": 3.7242, |
|
"step": 533500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.261708413256234e-05, |
|
"loss": 3.7208, |
|
"step": 534000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2582081402349382e-05, |
|
"loss": 3.7257, |
|
"step": 534500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2547078672136428e-05, |
|
"loss": 3.7217, |
|
"step": 535000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.251207594192347e-05, |
|
"loss": 3.7171, |
|
"step": 535500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2477073211710514e-05, |
|
"loss": 3.7264, |
|
"step": 536000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2442070481497558e-05, |
|
"loss": 3.7251, |
|
"step": 536500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2407067751284602e-05, |
|
"loss": 3.7299, |
|
"step": 537000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2372065021071644e-05, |
|
"loss": 3.7131, |
|
"step": 537500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2337062290858688e-05, |
|
"loss": 3.7217, |
|
"step": 538000 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.230205956064573e-05, |
|
"loss": 3.7159, |
|
"step": 538500 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 1.2267056830432775e-05, |
|
"loss": 3.7214, |
|
"step": 539000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2232054100219819e-05, |
|
"loss": 3.7263, |
|
"step": 539500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2197051370006861e-05, |
|
"loss": 3.7235, |
|
"step": 540000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"eval_loss": 3.6987826824188232, |
|
"eval_runtime": 181.2395, |
|
"eval_samples_per_second": 222.418, |
|
"eval_steps_per_second": 18.539, |
|
"step": 540000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2162048639793905e-05, |
|
"loss": 3.7143, |
|
"step": 540500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2127045909580947e-05, |
|
"loss": 3.718, |
|
"step": 541000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2092043179367991e-05, |
|
"loss": 3.7179, |
|
"step": 541500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2057040449155034e-05, |
|
"loss": 3.7215, |
|
"step": 542000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.2022037718942078e-05, |
|
"loss": 3.7294, |
|
"step": 542500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.1987034988729122e-05, |
|
"loss": 3.7275, |
|
"step": 543000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.1952032258516164e-05, |
|
"loss": 3.7293, |
|
"step": 543500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.1917029528303208e-05, |
|
"loss": 3.7208, |
|
"step": 544000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.188202679809025e-05, |
|
"loss": 3.731, |
|
"step": 544500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.1847024067877294e-05, |
|
"loss": 3.7227, |
|
"step": 545000 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.1812021337664338e-05, |
|
"loss": 3.7265, |
|
"step": 545500 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 1.177701860745138e-05, |
|
"loss": 3.7287, |
|
"step": 546000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1742015877238425e-05, |
|
"loss": 3.7239, |
|
"step": 546500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1707013147025469e-05, |
|
"loss": 3.7189, |
|
"step": 547000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1672010416812513e-05, |
|
"loss": 3.7184, |
|
"step": 547500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1637007686599555e-05, |
|
"loss": 3.7253, |
|
"step": 548000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.16020049563866e-05, |
|
"loss": 3.7163, |
|
"step": 548500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1567002226173643e-05, |
|
"loss": 3.7198, |
|
"step": 549000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1531999495960686e-05, |
|
"loss": 3.716, |
|
"step": 549500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.149699676574773e-05, |
|
"loss": 3.7236, |
|
"step": 550000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1461994035534772e-05, |
|
"loss": 3.7181, |
|
"step": 550500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1426991305321816e-05, |
|
"loss": 3.7177, |
|
"step": 551000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1391988575108858e-05, |
|
"loss": 3.7184, |
|
"step": 551500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1356985844895902e-05, |
|
"loss": 3.7187, |
|
"step": 552000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1321983114682946e-05, |
|
"loss": 3.7166, |
|
"step": 552500 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1286980384469989e-05, |
|
"loss": 3.7159, |
|
"step": 553000 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 1.1251977654257033e-05, |
|
"loss": 3.7158, |
|
"step": 553500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1216974924044075e-05, |
|
"loss": 3.7194, |
|
"step": 554000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1181972193831119e-05, |
|
"loss": 3.7231, |
|
"step": 554500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1146969463618161e-05, |
|
"loss": 3.7216, |
|
"step": 555000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1111966733405205e-05, |
|
"loss": 3.7276, |
|
"step": 555500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.107696400319225e-05, |
|
"loss": 3.7077, |
|
"step": 556000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1041961272979293e-05, |
|
"loss": 3.7189, |
|
"step": 556500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.1006958542766337e-05, |
|
"loss": 3.7156, |
|
"step": 557000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.097195581255338e-05, |
|
"loss": 3.7216, |
|
"step": 557500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.0936953082340424e-05, |
|
"loss": 3.7276, |
|
"step": 558000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.0901950352127468e-05, |
|
"loss": 3.7118, |
|
"step": 558500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.086694762191451e-05, |
|
"loss": 3.7178, |
|
"step": 559000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.0831944891701554e-05, |
|
"loss": 3.7233, |
|
"step": 559500 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.0796942161488596e-05, |
|
"loss": 3.7183, |
|
"step": 560000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"eval_loss": 3.6952383518218994, |
|
"eval_runtime": 182.804, |
|
"eval_samples_per_second": 220.515, |
|
"eval_steps_per_second": 18.38, |
|
"step": 560000 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 1.076193943127564e-05, |
|
"loss": 3.7187, |
|
"step": 560500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0726936701062683e-05, |
|
"loss": 3.7139, |
|
"step": 561000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0691933970849727e-05, |
|
"loss": 3.7188, |
|
"step": 561500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.065693124063677e-05, |
|
"loss": 3.7155, |
|
"step": 562000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0621928510423813e-05, |
|
"loss": 3.7191, |
|
"step": 562500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0586925780210857e-05, |
|
"loss": 3.7216, |
|
"step": 563000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.05519230499979e-05, |
|
"loss": 3.7143, |
|
"step": 563500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0516920319784943e-05, |
|
"loss": 3.7224, |
|
"step": 564000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0481917589571986e-05, |
|
"loss": 3.7213, |
|
"step": 564500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.044691485935903e-05, |
|
"loss": 3.7167, |
|
"step": 565000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0411912129146074e-05, |
|
"loss": 3.719, |
|
"step": 565500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0376909398933118e-05, |
|
"loss": 3.7239, |
|
"step": 566000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.034190666872016e-05, |
|
"loss": 3.7153, |
|
"step": 566500 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0306903938507204e-05, |
|
"loss": 3.7181, |
|
"step": 567000 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 1.0271901208294248e-05, |
|
"loss": 3.7179, |
|
"step": 567500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.023689847808129e-05, |
|
"loss": 3.7166, |
|
"step": 568000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0201895747868335e-05, |
|
"loss": 3.7088, |
|
"step": 568500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0166893017655379e-05, |
|
"loss": 3.7234, |
|
"step": 569000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0131890287442421e-05, |
|
"loss": 3.722, |
|
"step": 569500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0096887557229465e-05, |
|
"loss": 3.7186, |
|
"step": 570000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0061884827016507e-05, |
|
"loss": 3.7152, |
|
"step": 570500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.0026882096803551e-05, |
|
"loss": 3.7201, |
|
"step": 571000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.991879366590595e-06, |
|
"loss": 3.7297, |
|
"step": 571500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.956876636377638e-06, |
|
"loss": 3.7108, |
|
"step": 572000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.921873906164682e-06, |
|
"loss": 3.7198, |
|
"step": 572500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.886871175951724e-06, |
|
"loss": 3.7192, |
|
"step": 573000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.851868445738768e-06, |
|
"loss": 3.7156, |
|
"step": 573500 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.81686571552581e-06, |
|
"loss": 3.7199, |
|
"step": 574000 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 9.781862985312854e-06, |
|
"loss": 3.7165, |
|
"step": 574500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.746860255099898e-06, |
|
"loss": 3.7239, |
|
"step": 575000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.71185752488694e-06, |
|
"loss": 3.7149, |
|
"step": 575500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.676854794673985e-06, |
|
"loss": 3.7132, |
|
"step": 576000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.641852064461029e-06, |
|
"loss": 3.7091, |
|
"step": 576500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.606849334248073e-06, |
|
"loss": 3.7132, |
|
"step": 577000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.571846604035115e-06, |
|
"loss": 3.7165, |
|
"step": 577500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.536843873822159e-06, |
|
"loss": 3.7106, |
|
"step": 578000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.501841143609203e-06, |
|
"loss": 3.7186, |
|
"step": 578500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.466838413396245e-06, |
|
"loss": 3.7159, |
|
"step": 579000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.43183568318329e-06, |
|
"loss": 3.7171, |
|
"step": 579500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.396832952970332e-06, |
|
"loss": 3.7224, |
|
"step": 580000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"eval_loss": 3.6943321228027344, |
|
"eval_runtime": 185.8078, |
|
"eval_samples_per_second": 216.95, |
|
"eval_steps_per_second": 18.083, |
|
"step": 580000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.361830222757376e-06, |
|
"loss": 3.7163, |
|
"step": 580500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.326827492544418e-06, |
|
"loss": 3.7143, |
|
"step": 581000 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.291824762331462e-06, |
|
"loss": 3.7246, |
|
"step": 581500 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.256822032118506e-06, |
|
"loss": 3.717, |
|
"step": 582000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.221819301905549e-06, |
|
"loss": 3.7136, |
|
"step": 582500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.186816571692593e-06, |
|
"loss": 3.708, |
|
"step": 583000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.151813841479635e-06, |
|
"loss": 3.7157, |
|
"step": 583500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.116811111266679e-06, |
|
"loss": 3.716, |
|
"step": 584000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.081808381053723e-06, |
|
"loss": 3.7155, |
|
"step": 584500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.046805650840765e-06, |
|
"loss": 3.7189, |
|
"step": 585000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 9.01180292062781e-06, |
|
"loss": 3.7185, |
|
"step": 585500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.976800190414853e-06, |
|
"loss": 3.711, |
|
"step": 586000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.941797460201896e-06, |
|
"loss": 3.7209, |
|
"step": 586500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.90679472998894e-06, |
|
"loss": 3.7124, |
|
"step": 587000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.871791999775984e-06, |
|
"loss": 3.7133, |
|
"step": 587500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.836789269563028e-06, |
|
"loss": 3.7136, |
|
"step": 588000 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.80178653935007e-06, |
|
"loss": 3.713, |
|
"step": 588500 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 8.766783809137114e-06, |
|
"loss": 3.7125, |
|
"step": 589000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.731781078924156e-06, |
|
"loss": 3.707, |
|
"step": 589500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.6967783487112e-06, |
|
"loss": 3.7072, |
|
"step": 590000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.661775618498243e-06, |
|
"loss": 3.7145, |
|
"step": 590500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.626772888285287e-06, |
|
"loss": 3.7072, |
|
"step": 591000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.59177015807233e-06, |
|
"loss": 3.7151, |
|
"step": 591500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.556767427859373e-06, |
|
"loss": 3.7235, |
|
"step": 592000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.521764697646417e-06, |
|
"loss": 3.7205, |
|
"step": 592500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.48676196743346e-06, |
|
"loss": 3.7145, |
|
"step": 593000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.451759237220503e-06, |
|
"loss": 3.7119, |
|
"step": 593500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.416756507007546e-06, |
|
"loss": 3.7074, |
|
"step": 594000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.38175377679459e-06, |
|
"loss": 3.7151, |
|
"step": 594500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.346751046581634e-06, |
|
"loss": 3.7092, |
|
"step": 595000 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.311748316368676e-06, |
|
"loss": 3.7169, |
|
"step": 595500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 8.27674558615572e-06, |
|
"loss": 3.7108, |
|
"step": 596000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.241742855942764e-06, |
|
"loss": 3.7104, |
|
"step": 596500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.206740125729808e-06, |
|
"loss": 3.7148, |
|
"step": 597000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.171737395516852e-06, |
|
"loss": 3.71, |
|
"step": 597500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.136734665303894e-06, |
|
"loss": 3.7166, |
|
"step": 598000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.101731935090939e-06, |
|
"loss": 3.7119, |
|
"step": 598500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.06672920487798e-06, |
|
"loss": 3.7157, |
|
"step": 599000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 8.031726474665025e-06, |
|
"loss": 3.7127, |
|
"step": 599500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.996723744452067e-06, |
|
"loss": 3.7105, |
|
"step": 600000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"eval_loss": 3.6923341751098633, |
|
"eval_runtime": 184.3386, |
|
"eval_samples_per_second": 218.679, |
|
"eval_steps_per_second": 18.227, |
|
"step": 600000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.961721014239111e-06, |
|
"loss": 3.7176, |
|
"step": 600500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.926718284026155e-06, |
|
"loss": 3.7105, |
|
"step": 601000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.891715553813198e-06, |
|
"loss": 3.7145, |
|
"step": 601500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.856712823600242e-06, |
|
"loss": 3.71, |
|
"step": 602000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.821710093387284e-06, |
|
"loss": 3.7166, |
|
"step": 602500 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.786707363174328e-06, |
|
"loss": 3.7054, |
|
"step": 603000 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 7.75170463296137e-06, |
|
"loss": 3.7121, |
|
"step": 603500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.716701902748414e-06, |
|
"loss": 3.719, |
|
"step": 604000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.681699172535458e-06, |
|
"loss": 3.7162, |
|
"step": 604500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.6466964423225e-06, |
|
"loss": 3.7163, |
|
"step": 605000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.6116937121095455e-06, |
|
"loss": 3.7104, |
|
"step": 605500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.576690981896588e-06, |
|
"loss": 3.7105, |
|
"step": 606000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.541688251683632e-06, |
|
"loss": 3.7076, |
|
"step": 606500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.506685521470674e-06, |
|
"loss": 3.7127, |
|
"step": 607000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.471682791257718e-06, |
|
"loss": 3.7095, |
|
"step": 607500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.436680061044762e-06, |
|
"loss": 3.7166, |
|
"step": 608000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.401677330831805e-06, |
|
"loss": 3.7087, |
|
"step": 608500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.3666746006188485e-06, |
|
"loss": 3.7162, |
|
"step": 609000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.331671870405892e-06, |
|
"loss": 3.7177, |
|
"step": 609500 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.296669140192936e-06, |
|
"loss": 3.7143, |
|
"step": 610000 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 7.26166640997998e-06, |
|
"loss": 3.7176, |
|
"step": 610500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.226663679767022e-06, |
|
"loss": 3.7082, |
|
"step": 611000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.191660949554066e-06, |
|
"loss": 3.7167, |
|
"step": 611500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.156658219341108e-06, |
|
"loss": 3.7109, |
|
"step": 612000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.121655489128152e-06, |
|
"loss": 3.7098, |
|
"step": 612500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.086652758915195e-06, |
|
"loss": 3.7204, |
|
"step": 613000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.051650028702239e-06, |
|
"loss": 3.7077, |
|
"step": 613500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 7.016647298489283e-06, |
|
"loss": 3.7102, |
|
"step": 614000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.981644568276326e-06, |
|
"loss": 3.7139, |
|
"step": 614500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.94664183806337e-06, |
|
"loss": 3.7102, |
|
"step": 615000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.911639107850412e-06, |
|
"loss": 3.713, |
|
"step": 615500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.876636377637456e-06, |
|
"loss": 3.7136, |
|
"step": 616000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.841633647424499e-06, |
|
"loss": 3.7162, |
|
"step": 616500 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.806630917211543e-06, |
|
"loss": 3.7165, |
|
"step": 617000 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 6.771628186998587e-06, |
|
"loss": 3.7126, |
|
"step": 617500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.736625456785629e-06, |
|
"loss": 3.7053, |
|
"step": 618000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.701622726572673e-06, |
|
"loss": 3.7112, |
|
"step": 618500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.666619996359716e-06, |
|
"loss": 3.7077, |
|
"step": 619000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.63161726614676e-06, |
|
"loss": 3.7136, |
|
"step": 619500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.5966145359338026e-06, |
|
"loss": 3.7126, |
|
"step": 620000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"eval_loss": 3.6894869804382324, |
|
"eval_runtime": 182.4973, |
|
"eval_samples_per_second": 220.885, |
|
"eval_steps_per_second": 18.411, |
|
"step": 620000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.5616118057208466e-06, |
|
"loss": 3.7079, |
|
"step": 620500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.526609075507891e-06, |
|
"loss": 3.7118, |
|
"step": 621000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.491606345294933e-06, |
|
"loss": 3.7064, |
|
"step": 621500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.456603615081977e-06, |
|
"loss": 3.7093, |
|
"step": 622000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.421600884869019e-06, |
|
"loss": 3.7121, |
|
"step": 622500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.386598154656063e-06, |
|
"loss": 3.7025, |
|
"step": 623000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.351595424443107e-06, |
|
"loss": 3.7187, |
|
"step": 623500 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.3165926942301505e-06, |
|
"loss": 3.706, |
|
"step": 624000 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 6.2815899640171945e-06, |
|
"loss": 3.7105, |
|
"step": 624500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.246587233804238e-06, |
|
"loss": 3.7152, |
|
"step": 625000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.211584503591281e-06, |
|
"loss": 3.7099, |
|
"step": 625500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.176581773378324e-06, |
|
"loss": 3.6976, |
|
"step": 626000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.141579043165367e-06, |
|
"loss": 3.7113, |
|
"step": 626500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.10657631295241e-06, |
|
"loss": 3.7144, |
|
"step": 627000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.0715735827394535e-06, |
|
"loss": 3.7019, |
|
"step": 627500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.036570852526497e-06, |
|
"loss": 3.7186, |
|
"step": 628000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.001568122313541e-06, |
|
"loss": 3.7146, |
|
"step": 628500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.966565392100584e-06, |
|
"loss": 3.7121, |
|
"step": 629000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.931562661887628e-06, |
|
"loss": 3.7161, |
|
"step": 629500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.896559931674671e-06, |
|
"loss": 3.7128, |
|
"step": 630000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.861557201461714e-06, |
|
"loss": 3.7139, |
|
"step": 630500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.8265544712487574e-06, |
|
"loss": 3.7171, |
|
"step": 631000 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.7915517410358015e-06, |
|
"loss": 3.7109, |
|
"step": 631500 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 5.756549010822845e-06, |
|
"loss": 3.7093, |
|
"step": 632000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.721546280609888e-06, |
|
"loss": 3.7083, |
|
"step": 632500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.686543550396931e-06, |
|
"loss": 3.7106, |
|
"step": 633000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.651540820183974e-06, |
|
"loss": 3.7039, |
|
"step": 633500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.616538089971018e-06, |
|
"loss": 3.7124, |
|
"step": 634000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.581535359758061e-06, |
|
"loss": 3.7111, |
|
"step": 634500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.546532629545105e-06, |
|
"loss": 3.7156, |
|
"step": 635000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.5115298993321485e-06, |
|
"loss": 3.7136, |
|
"step": 635500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.476527169119192e-06, |
|
"loss": 3.7059, |
|
"step": 636000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.441524438906235e-06, |
|
"loss": 3.7056, |
|
"step": 636500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.406521708693278e-06, |
|
"loss": 3.6957, |
|
"step": 637000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.371518978480321e-06, |
|
"loss": 3.7045, |
|
"step": 637500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.336516248267365e-06, |
|
"loss": 3.7077, |
|
"step": 638000 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.301513518054408e-06, |
|
"loss": 3.7045, |
|
"step": 638500 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.266510787841452e-06, |
|
"loss": 3.7042, |
|
"step": 639000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.231508057628496e-06, |
|
"loss": 3.7073, |
|
"step": 639500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.196505327415539e-06, |
|
"loss": 3.7089, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"eval_loss": 3.688789129257202, |
|
"eval_runtime": 183.1343, |
|
"eval_samples_per_second": 220.117, |
|
"eval_steps_per_second": 18.347, |
|
"step": 640000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.161502597202582e-06, |
|
"loss": 3.7079, |
|
"step": 640500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.126499866989625e-06, |
|
"loss": 3.7046, |
|
"step": 641000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.091497136776669e-06, |
|
"loss": 3.7044, |
|
"step": 641500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.056494406563712e-06, |
|
"loss": 3.7071, |
|
"step": 642000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.0214916763507555e-06, |
|
"loss": 3.708, |
|
"step": 642500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.986488946137799e-06, |
|
"loss": 3.7061, |
|
"step": 643000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.951486215924842e-06, |
|
"loss": 3.7104, |
|
"step": 643500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.916483485711886e-06, |
|
"loss": 3.71, |
|
"step": 644000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.88148075549893e-06, |
|
"loss": 3.7113, |
|
"step": 644500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.846478025285973e-06, |
|
"loss": 3.7083, |
|
"step": 645000 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.811475295073016e-06, |
|
"loss": 3.7076, |
|
"step": 645500 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.776472564860059e-06, |
|
"loss": 3.7028, |
|
"step": 646000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.7414698346471026e-06, |
|
"loss": 3.7117, |
|
"step": 646500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.706467104434146e-06, |
|
"loss": 3.7065, |
|
"step": 647000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.671464374221189e-06, |
|
"loss": 3.7108, |
|
"step": 647500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.636461644008233e-06, |
|
"loss": 3.7052, |
|
"step": 648000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.601458913795276e-06, |
|
"loss": 3.7013, |
|
"step": 648500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.566456183582319e-06, |
|
"loss": 3.7103, |
|
"step": 649000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.531453453369363e-06, |
|
"loss": 3.7054, |
|
"step": 649500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.4964507231564065e-06, |
|
"loss": 3.7021, |
|
"step": 650000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.46144799294345e-06, |
|
"loss": 3.7028, |
|
"step": 650500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.426445262730494e-06, |
|
"loss": 3.7099, |
|
"step": 651000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.391442532517537e-06, |
|
"loss": 3.7021, |
|
"step": 651500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.35643980230458e-06, |
|
"loss": 3.7124, |
|
"step": 652000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.321437072091623e-06, |
|
"loss": 3.7058, |
|
"step": 652500 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.286434341878666e-06, |
|
"loss": 3.7073, |
|
"step": 653000 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.2514316116657095e-06, |
|
"loss": 3.7094, |
|
"step": 653500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.2164288814527535e-06, |
|
"loss": 3.7112, |
|
"step": 654000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.1814261512397976e-06, |
|
"loss": 3.6986, |
|
"step": 654500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.146423421026841e-06, |
|
"loss": 3.6998, |
|
"step": 655000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.111420690813884e-06, |
|
"loss": 3.7039, |
|
"step": 655500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.076417960600927e-06, |
|
"loss": 3.7069, |
|
"step": 656000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.04141523038797e-06, |
|
"loss": 3.7011, |
|
"step": 656500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 4.0064125001750134e-06, |
|
"loss": 3.7084, |
|
"step": 657000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.9714097699620574e-06, |
|
"loss": 3.7014, |
|
"step": 657500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.936407039749101e-06, |
|
"loss": 3.7022, |
|
"step": 658000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.901404309536144e-06, |
|
"loss": 3.7097, |
|
"step": 658500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.866401579323188e-06, |
|
"loss": 3.7015, |
|
"step": 659000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.831398849110231e-06, |
|
"loss": 3.7074, |
|
"step": 659500 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.796396118897274e-06, |
|
"loss": 3.7002, |
|
"step": 660000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"eval_loss": 3.6861209869384766, |
|
"eval_runtime": 182.3225, |
|
"eval_samples_per_second": 221.097, |
|
"eval_steps_per_second": 18.429, |
|
"step": 660000 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.7613933886843173e-06, |
|
"loss": 3.7113, |
|
"step": 660500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.7263906584713614e-06, |
|
"loss": 3.7097, |
|
"step": 661000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6913879282584045e-06, |
|
"loss": 3.7119, |
|
"step": 661500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.6563851980454477e-06, |
|
"loss": 3.714, |
|
"step": 662000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.621382467832491e-06, |
|
"loss": 3.707, |
|
"step": 662500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.5863797376195345e-06, |
|
"loss": 3.6998, |
|
"step": 663000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.5513770074065776e-06, |
|
"loss": 3.7068, |
|
"step": 663500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.5163742771936217e-06, |
|
"loss": 3.7037, |
|
"step": 664000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.481371546980665e-06, |
|
"loss": 3.7126, |
|
"step": 664500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.446368816767708e-06, |
|
"loss": 3.7085, |
|
"step": 665000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.4113660865547516e-06, |
|
"loss": 3.7135, |
|
"step": 665500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.3763633563417948e-06, |
|
"loss": 3.7093, |
|
"step": 666000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.341360626128838e-06, |
|
"loss": 3.7093, |
|
"step": 666500 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.306357895915881e-06, |
|
"loss": 3.7133, |
|
"step": 667000 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.271355165702925e-06, |
|
"loss": 3.7041, |
|
"step": 667500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.2363524354899687e-06, |
|
"loss": 3.708, |
|
"step": 668000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.201349705277012e-06, |
|
"loss": 3.7132, |
|
"step": 668500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.166346975064055e-06, |
|
"loss": 3.6983, |
|
"step": 669000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.1313442448510983e-06, |
|
"loss": 3.6999, |
|
"step": 669500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.096341514638142e-06, |
|
"loss": 3.7076, |
|
"step": 670000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.0613387844251854e-06, |
|
"loss": 3.7053, |
|
"step": 670500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 3.0263360542122286e-06, |
|
"loss": 3.7065, |
|
"step": 671000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9913333239992722e-06, |
|
"loss": 3.7057, |
|
"step": 671500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9563305937863154e-06, |
|
"loss": 3.7085, |
|
"step": 672000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.9213278635733586e-06, |
|
"loss": 3.7094, |
|
"step": 672500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.8863251333604026e-06, |
|
"loss": 3.7021, |
|
"step": 673000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.8513224031474458e-06, |
|
"loss": 3.6971, |
|
"step": 673500 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.816319672934489e-06, |
|
"loss": 3.7109, |
|
"step": 674000 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.7813169427215325e-06, |
|
"loss": 3.6993, |
|
"step": 674500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7463142125085757e-06, |
|
"loss": 3.6976, |
|
"step": 675000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.7113114822956193e-06, |
|
"loss": 3.6992, |
|
"step": 675500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.6763087520826625e-06, |
|
"loss": 3.6955, |
|
"step": 676000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.641306021869706e-06, |
|
"loss": 3.7113, |
|
"step": 676500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.6063032916567492e-06, |
|
"loss": 3.6985, |
|
"step": 677000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.5713005614437924e-06, |
|
"loss": 3.7035, |
|
"step": 677500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.5362978312308364e-06, |
|
"loss": 3.6965, |
|
"step": 678000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.5012951010178796e-06, |
|
"loss": 3.7069, |
|
"step": 678500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.4662923708049228e-06, |
|
"loss": 3.704, |
|
"step": 679000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.4312896405919664e-06, |
|
"loss": 3.7028, |
|
"step": 679500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.3962869103790095e-06, |
|
"loss": 3.6979, |
|
"step": 680000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"eval_loss": 3.6850595474243164, |
|
"eval_runtime": 181.6109, |
|
"eval_samples_per_second": 221.964, |
|
"eval_steps_per_second": 18.501, |
|
"step": 680000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.361284180166053e-06, |
|
"loss": 3.6957, |
|
"step": 680500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.3262814499530967e-06, |
|
"loss": 3.6992, |
|
"step": 681000 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.29127871974014e-06, |
|
"loss": 3.692, |
|
"step": 681500 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 2.256275989527183e-06, |
|
"loss": 3.7092, |
|
"step": 682000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.2212732593142262e-06, |
|
"loss": 3.7033, |
|
"step": 682500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1862705291012703e-06, |
|
"loss": 3.7022, |
|
"step": 683000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1512677988883134e-06, |
|
"loss": 3.7059, |
|
"step": 683500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.1162650686753566e-06, |
|
"loss": 3.6916, |
|
"step": 684000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.0812623384624e-06, |
|
"loss": 3.7047, |
|
"step": 684500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.0462596082494434e-06, |
|
"loss": 3.7114, |
|
"step": 685000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 2.011256878036487e-06, |
|
"loss": 3.6965, |
|
"step": 685500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9762541478235306e-06, |
|
"loss": 3.7056, |
|
"step": 686000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.9412514176105737e-06, |
|
"loss": 3.7, |
|
"step": 686500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.906248687397617e-06, |
|
"loss": 3.7007, |
|
"step": 687000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8712459571846607e-06, |
|
"loss": 3.6984, |
|
"step": 687500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.836243226971704e-06, |
|
"loss": 3.6937, |
|
"step": 688000 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.8012404967587473e-06, |
|
"loss": 3.7011, |
|
"step": 688500 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.7662377665457905e-06, |
|
"loss": 3.7057, |
|
"step": 689000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.731235036332834e-06, |
|
"loss": 3.6987, |
|
"step": 689500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6962323061198774e-06, |
|
"loss": 3.7017, |
|
"step": 690000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6612295759069206e-06, |
|
"loss": 3.7061, |
|
"step": 690500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.6262268456939642e-06, |
|
"loss": 3.7088, |
|
"step": 691000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5912241154810076e-06, |
|
"loss": 3.7, |
|
"step": 691500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.556221385268051e-06, |
|
"loss": 3.6981, |
|
"step": 692000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.5212186550550944e-06, |
|
"loss": 3.6997, |
|
"step": 692500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4862159248421377e-06, |
|
"loss": 3.7004, |
|
"step": 693000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4512131946291811e-06, |
|
"loss": 3.7006, |
|
"step": 693500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.4162104644162245e-06, |
|
"loss": 3.7022, |
|
"step": 694000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.381207734203268e-06, |
|
"loss": 3.6992, |
|
"step": 694500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.3462050039903113e-06, |
|
"loss": 3.7036, |
|
"step": 695000 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.3112022737773547e-06, |
|
"loss": 3.7027, |
|
"step": 695500 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 1.276199543564398e-06, |
|
"loss": 3.7025, |
|
"step": 696000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2411968133514414e-06, |
|
"loss": 3.707, |
|
"step": 696500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2061940831384848e-06, |
|
"loss": 3.7104, |
|
"step": 697000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.1711913529255282e-06, |
|
"loss": 3.6986, |
|
"step": 697500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.1361886227125716e-06, |
|
"loss": 3.7031, |
|
"step": 698000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.101185892499615e-06, |
|
"loss": 3.7025, |
|
"step": 698500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0661831622866584e-06, |
|
"loss": 3.7021, |
|
"step": 699000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.0311804320737017e-06, |
|
"loss": 3.7044, |
|
"step": 699500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.961777018607453e-07, |
|
"loss": 3.6946, |
|
"step": 700000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"eval_loss": 3.683551073074341, |
|
"eval_runtime": 181.9013, |
|
"eval_samples_per_second": 221.609, |
|
"eval_steps_per_second": 18.472, |
|
"step": 700000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.611749716477885e-07, |
|
"loss": 3.7051, |
|
"step": 700500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 9.26172241434832e-07, |
|
"loss": 3.6967, |
|
"step": 701000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.911695112218753e-07, |
|
"loss": 3.7009, |
|
"step": 701500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.561667810089188e-07, |
|
"loss": 3.6992, |
|
"step": 702000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 8.211640507959622e-07, |
|
"loss": 3.699, |
|
"step": 702500 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.861613205830054e-07, |
|
"loss": 3.7081, |
|
"step": 703000 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 7.511585903700489e-07, |
|
"loss": 3.7041, |
|
"step": 703500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.161558601570923e-07, |
|
"loss": 3.6968, |
|
"step": 704000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.811531299441357e-07, |
|
"loss": 3.6986, |
|
"step": 704500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.461503997311791e-07, |
|
"loss": 3.7021, |
|
"step": 705000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.111476695182225e-07, |
|
"loss": 3.7039, |
|
"step": 705500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.761449393052658e-07, |
|
"loss": 3.702, |
|
"step": 706000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.411422090923092e-07, |
|
"loss": 3.6997, |
|
"step": 706500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.061394788793526e-07, |
|
"loss": 3.7019, |
|
"step": 707000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.7113674866639606e-07, |
|
"loss": 3.7077, |
|
"step": 707500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.361340184534394e-07, |
|
"loss": 3.7001, |
|
"step": 708000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.0113128824048277e-07, |
|
"loss": 3.7046, |
|
"step": 708500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.661285580275262e-07, |
|
"loss": 3.71, |
|
"step": 709000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.3112582781456954e-07, |
|
"loss": 3.6954, |
|
"step": 709500 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.961230976016129e-07, |
|
"loss": 3.7062, |
|
"step": 710000 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.611203673886563e-07, |
|
"loss": 3.6967, |
|
"step": 710500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.261176371756997e-07, |
|
"loss": 3.7019, |
|
"step": 711000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.911149069627431e-07, |
|
"loss": 3.6934, |
|
"step": 711500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.561121767497865e-07, |
|
"loss": 3.7044, |
|
"step": 712000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.211094465368299e-07, |
|
"loss": 3.7059, |
|
"step": 712500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 8.610671632387327e-08, |
|
"loss": 3.6842, |
|
"step": 713000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 5.1103986110916656e-08, |
|
"loss": 3.6974, |
|
"step": 713500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.610125589796004e-08, |
|
"loss": 3.7086, |
|
"step": 714000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 714230, |
|
"total_flos": 2.2394702390561341e+18, |
|
"train_loss": 3.773873895684371, |
|
"train_runtime": 142283.9744, |
|
"train_samples_per_second": 60.237, |
|
"train_steps_per_second": 5.02 |
|
} |
|
], |
|
"max_steps": 714230, |
|
"num_train_epochs": 1, |
|
"total_flos": 2.2394702390561341e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|