{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 17.0,
  "global_step": 86615,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 5e-09,
      "loss": 10.525,
      "step": 1
    },
    {
      "epoch": 0.1,
      "learning_rate": 2.5e-06,
      "loss": 9.5219,
      "step": 500
    },
    {
      "epoch": 0.2,
      "learning_rate": 5e-06,
      "loss": 7.6948,
      "step": 1000
    },
    {
      "epoch": 0.29,
      "learning_rate": 7.5e-06,
      "loss": 6.4159,
      "step": 1500
    },
    {
      "epoch": 0.39,
      "learning_rate": 1e-05,
      "loss": 6.1298,
      "step": 2000
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.25e-05,
      "loss": 6.0017,
      "step": 2500
    },
    {
      "epoch": 0.59,
      "learning_rate": 1.5e-05,
      "loss": 5.9201,
      "step": 3000
    },
    {
      "epoch": 0.69,
      "learning_rate": 1.75e-05,
      "loss": 5.8528,
      "step": 3500
    },
    {
      "epoch": 0.79,
      "learning_rate": 2e-05,
      "loss": 5.8017,
      "step": 4000
    },
    {
      "epoch": 0.88,
      "learning_rate": 2.25e-05,
      "loss": 5.7573,
      "step": 4500
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.5e-05,
      "loss": 5.7162,
      "step": 5000
    },
    {
      "epoch": 1.08,
      "learning_rate": 2.7495000000000004e-05,
      "loss": 5.6827,
      "step": 5500
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.9995e-05,
      "loss": 5.6557,
      "step": 6000
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.2495000000000007e-05,
      "loss": 5.6229,
      "step": 6500
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.4995e-05,
      "loss": 5.599,
      "step": 7000
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.7495e-05,
      "loss": 5.5785,
      "step": 7500
    },
    {
      "epoch": 1.57,
      "learning_rate": 3.9995000000000006e-05,
      "loss": 5.5579,
      "step": 8000
    },
    {
      "epoch": 1.67,
      "learning_rate": 4.2495e-05,
      "loss": 5.5374,
      "step": 8500
    },
    {
      "epoch": 1.77,
      "learning_rate": 4.4995000000000005e-05,
      "loss": 5.5223,
      "step": 9000
    },
    {
      "epoch": 1.86,
      "learning_rate": 4.7495e-05,
      "loss": 5.5075,
      "step": 9500
    },
    {
      "epoch": 1.96,
      "learning_rate": 4.9995000000000005e-05,
      "loss": 5.4966,
      "step": 10000
    },
    {
      "epoch": 2.06,
      "learning_rate": 4.9987721893491125e-05,
      "loss": 5.4806,
      "step": 10500
    },
    {
      "epoch": 2.16,
      "learning_rate": 4.997539447731755e-05,
      "loss": 5.4673,
      "step": 11000
    },
    {
      "epoch": 2.26,
      "learning_rate": 4.996306706114399e-05,
      "loss": 5.4577,
      "step": 11500
    },
    {
      "epoch": 2.36,
      "learning_rate": 4.9950739644970415e-05,
      "loss": 5.4477,
      "step": 12000
    },
    {
      "epoch": 2.45,
      "learning_rate": 4.993841222879685e-05,
      "loss": 5.4356,
      "step": 12500
    },
    {
      "epoch": 2.55,
      "learning_rate": 4.9926084812623276e-05,
      "loss": 5.4225,
      "step": 13000
    },
    {
      "epoch": 2.65,
      "learning_rate": 4.9913757396449704e-05,
      "loss": 5.4196,
      "step": 13500
    },
    {
      "epoch": 2.75,
      "learning_rate": 4.990142998027614e-05,
      "loss": 5.4087,
      "step": 14000
    },
    {
      "epoch": 2.85,
      "learning_rate": 4.9889127218934914e-05,
      "loss": 5.4021,
      "step": 14500
    },
    {
      "epoch": 2.94,
      "learning_rate": 4.987679980276134e-05,
      "loss": 5.3944,
      "step": 15000
    },
    {
      "epoch": 3.04,
      "learning_rate": 4.986449704142012e-05,
      "loss": 5.3828,
      "step": 15500
    },
    {
      "epoch": 3.14,
      "learning_rate": 4.985216962524655e-05,
      "loss": 5.3786,
      "step": 16000
    },
    {
      "epoch": 3.24,
      "learning_rate": 4.983984220907298e-05,
      "loss": 5.3733,
      "step": 16500
    },
    {
      "epoch": 3.34,
      "learning_rate": 4.982751479289941e-05,
      "loss": 5.3666,
      "step": 17000
    },
    {
      "epoch": 3.43,
      "learning_rate": 4.981518737672584e-05,
      "loss": 5.3626,
      "step": 17500
    },
    {
      "epoch": 3.53,
      "learning_rate": 4.9802859960552274e-05,
      "loss": 5.353,
      "step": 18000
    },
    {
      "epoch": 3.63,
      "learning_rate": 4.97905325443787e-05,
      "loss": 5.3496,
      "step": 18500
    },
    {
      "epoch": 3.73,
      "learning_rate": 4.977820512820513e-05,
      "loss": 5.3454,
      "step": 19000
    },
    {
      "epoch": 3.83,
      "learning_rate": 4.976590236686391e-05,
      "loss": 5.3396,
      "step": 19500
    },
    {
      "epoch": 3.93,
      "learning_rate": 4.975359960552268e-05,
      "loss": 5.3352,
      "step": 20000
    },
    {
      "epoch": 4.02,
      "learning_rate": 4.974129684418146e-05,
      "loss": 5.3312,
      "step": 20500
    },
    {
      "epoch": 4.12,
      "learning_rate": 4.9728969428007896e-05,
      "loss": 5.3245,
      "step": 21000
    },
    {
      "epoch": 4.22,
      "learning_rate": 4.9716642011834323e-05,
      "loss": 5.3126,
      "step": 21500
    },
    {
      "epoch": 4.32,
      "learning_rate": 4.970431459566075e-05,
      "loss": 5.0663,
      "step": 22000
    },
    {
      "epoch": 4.42,
      "learning_rate": 4.969198717948718e-05,
      "loss": 4.7802,
      "step": 22500
    },
    {
      "epoch": 4.51,
      "learning_rate": 4.967965976331361e-05,
      "loss": 4.5089,
      "step": 23000
    },
    {
      "epoch": 4.61,
      "learning_rate": 4.966733234714005e-05,
      "loss": 4.2703,
      "step": 23500
    },
    {
      "epoch": 4.71,
      "learning_rate": 4.965500493096647e-05,
      "loss": 4.0607,
      "step": 24000
    },
    {
      "epoch": 4.81,
      "learning_rate": 4.96426775147929e-05,
      "loss": 3.8757,
      "step": 24500
    },
    {
      "epoch": 4.91,
      "learning_rate": 4.963037475345168e-05,
      "loss": 3.5749,
      "step": 25000
    },
    {
      "epoch": 5.0,
      "learning_rate": 4.961807199211045e-05,
      "loss": 3.0471,
      "step": 25500
    },
    {
      "epoch": 5.1,
      "learning_rate": 4.960574457593689e-05,
      "loss": 2.5977,
      "step": 26000
    },
    {
      "epoch": 5.2,
      "learning_rate": 4.9593417159763314e-05,
      "loss": 2.1306,
      "step": 26500
    },
    {
      "epoch": 5.3,
      "learning_rate": 4.958108974358975e-05,
      "loss": 1.9047,
      "step": 27000
    },
    {
      "epoch": 5.4,
      "learning_rate": 4.9568762327416176e-05,
      "loss": 1.7619,
      "step": 27500
    },
    {
      "epoch": 5.5,
      "learning_rate": 4.9556434911242603e-05,
      "loss": 1.6574,
      "step": 28000
    },
    {
      "epoch": 5.59,
      "learning_rate": 4.954410749506904e-05,
      "loss": 1.5785,
      "step": 28500
    },
    {
      "epoch": 5.69,
      "learning_rate": 4.9531780078895465e-05,
      "loss": 1.5168,
      "step": 29000
    },
    {
      "epoch": 5.79,
      "learning_rate": 4.951945266272189e-05,
      "loss": 1.467,
      "step": 29500
    },
    {
      "epoch": 5.89,
      "learning_rate": 4.950712524654833e-05,
      "loss": 1.4234,
      "step": 30000
    },
    {
      "epoch": 5.99,
      "learning_rate": 4.94948224852071e-05,
      "loss": 1.3853,
      "step": 30500
    },
    {
      "epoch": 6.08,
      "learning_rate": 4.948251972386588e-05,
      "loss": 1.3438,
      "step": 31000
    },
    {
      "epoch": 6.18,
      "learning_rate": 4.947019230769231e-05,
      "loss": 1.2958,
      "step": 31500
    },
    {
      "epoch": 6.28,
      "learning_rate": 4.945786489151874e-05,
      "loss": 1.241,
      "step": 32000
    },
    {
      "epoch": 6.38,
      "learning_rate": 4.9445537475345174e-05,
      "loss": 1.1933,
      "step": 32500
    },
    {
      "epoch": 6.48,
      "learning_rate": 4.9433210059171594e-05,
      "loss": 1.1561,
      "step": 33000
    },
    {
      "epoch": 6.58,
      "learning_rate": 4.942088264299803e-05,
      "loss": 1.1211,
      "step": 33500
    },
    {
      "epoch": 6.67,
      "learning_rate": 4.940855522682446e-05,
      "loss": 1.097,
      "step": 34000
    },
    {
      "epoch": 6.77,
      "learning_rate": 4.939622781065089e-05,
      "loss": 1.0713,
      "step": 34500
    },
    {
      "epoch": 6.87,
      "learning_rate": 4.9383925049309666e-05,
      "loss": 1.0513,
      "step": 35000
    },
    {
      "epoch": 6.97,
      "learning_rate": 4.937159763313609e-05,
      "loss": 1.0334,
      "step": 35500
    },
    {
      "epoch": 7.07,
      "learning_rate": 4.9359294871794875e-05,
      "loss": 1.012,
      "step": 36000
    },
    {
      "epoch": 7.16,
      "learning_rate": 4.934696745562131e-05,
      "loss": 0.9953,
      "step": 36500
    },
    {
      "epoch": 7.26,
      "learning_rate": 4.933464003944773e-05,
      "loss": 0.9811,
      "step": 37000
    },
    {
      "epoch": 7.36,
      "learning_rate": 4.9322312623274164e-05,
      "loss": 0.9679,
      "step": 37500
    },
    {
      "epoch": 7.46,
      "learning_rate": 4.930998520710059e-05,
      "loss": 0.9546,
      "step": 38000
    },
    {
      "epoch": 7.56,
      "learning_rate": 4.9297657790927026e-05,
      "loss": 0.9435,
      "step": 38500
    },
    {
      "epoch": 7.65,
      "learning_rate": 4.9285330374753454e-05,
      "loss": 0.9284,
      "step": 39000
    },
    {
      "epoch": 7.75,
      "learning_rate": 4.927300295857988e-05,
      "loss": 0.9222,
      "step": 39500
    },
    {
      "epoch": 7.85,
      "learning_rate": 4.926070019723866e-05,
      "loss": 0.913,
      "step": 40000
    },
    {
      "epoch": 7.95,
      "learning_rate": 4.924837278106509e-05,
      "loss": 0.9039,
      "step": 40500
    },
    {
      "epoch": 8.05,
      "learning_rate": 4.9236070019723866e-05,
      "loss": 0.8952,
      "step": 41000
    },
    {
      "epoch": 8.15,
      "learning_rate": 4.92237426035503e-05,
      "loss": 0.8863,
      "step": 41500
    },
    {
      "epoch": 8.24,
      "learning_rate": 4.921141518737673e-05,
      "loss": 0.8774,
      "step": 42000
    },
    {
      "epoch": 8.34,
      "learning_rate": 4.9199087771203155e-05,
      "loss": 0.8715,
      "step": 42500
    },
    {
      "epoch": 8.44,
      "learning_rate": 4.918676035502959e-05,
      "loss": 0.8636,
      "step": 43000
    },
    {
      "epoch": 8.54,
      "learning_rate": 4.917443293885602e-05,
      "loss": 0.8558,
      "step": 43500
    },
    {
      "epoch": 8.64,
      "learning_rate": 4.916210552268245e-05,
      "loss": 0.8472,
      "step": 44000
    },
    {
      "epoch": 8.73,
      "learning_rate": 4.914977810650888e-05,
      "loss": 0.8439,
      "step": 44500
    },
    {
      "epoch": 8.83,
      "learning_rate": 4.9137475345167654e-05,
      "loss": 0.8341,
      "step": 45000
    },
    {
      "epoch": 8.93,
      "learning_rate": 4.912514792899409e-05,
      "loss": 0.8298,
      "step": 45500
    },
    {
      "epoch": 9.03,
      "learning_rate": 4.9112820512820516e-05,
      "loss": 0.8235,
      "step": 46000
    },
    {
      "epoch": 9.13,
      "learning_rate": 4.910049309664694e-05,
      "loss": 0.8192,
      "step": 46500
    },
    {
      "epoch": 9.22,
      "learning_rate": 4.908816568047338e-05,
      "loss": 0.8103,
      "step": 47000
    },
    {
      "epoch": 9.32,
      "learning_rate": 4.907586291913215e-05,
      "loss": 0.8077,
      "step": 47500
    },
    {
      "epoch": 9.42,
      "learning_rate": 4.906353550295858e-05,
      "loss": 0.8009,
      "step": 48000
    },
    {
      "epoch": 9.52,
      "learning_rate": 4.905120808678501e-05,
      "loss": 0.7984,
      "step": 48500
    },
    {
      "epoch": 9.62,
      "learning_rate": 4.903888067061144e-05,
      "loss": 0.7924,
      "step": 49000
    },
    {
      "epoch": 9.72,
      "learning_rate": 4.9026553254437876e-05,
      "loss": 0.7877,
      "step": 49500
    },
    {
      "epoch": 9.81,
      "learning_rate": 4.9014225838264304e-05,
      "loss": 0.7824,
      "step": 50000
    },
    {
      "epoch": 9.91,
      "learning_rate": 4.900192307692308e-05,
      "loss": 0.7794,
      "step": 50500
    },
    {
      "epoch": 10.01,
      "learning_rate": 4.898962031558186e-05,
      "loss": 0.7749,
      "step": 51000
    },
    {
      "epoch": 10.11,
      "learning_rate": 4.897729289940829e-05,
      "loss": 0.7697,
      "step": 51500
    },
    {
      "epoch": 10.21,
      "learning_rate": 4.8964965483234716e-05,
      "loss": 0.7651,
      "step": 52000
    },
    {
      "epoch": 10.3,
      "learning_rate": 4.8952638067061144e-05,
      "loss": 0.7603,
      "step": 52500
    },
    {
      "epoch": 10.4,
      "learning_rate": 4.894031065088758e-05,
      "loss": 0.7583,
      "step": 53000
    },
    {
      "epoch": 10.5,
      "learning_rate": 4.892798323471401e-05,
      "loss": 0.755,
      "step": 53500
    },
    {
      "epoch": 10.6,
      "learning_rate": 4.891565581854043e-05,
      "loss": 0.7497,
      "step": 54000
    },
    {
      "epoch": 10.7,
      "learning_rate": 4.890332840236687e-05,
      "loss": 0.7457,
      "step": 54500
    },
    {
      "epoch": 10.79,
      "learning_rate": 4.8891000986193295e-05,
      "loss": 0.7429,
      "step": 55000
    },
    {
      "epoch": 10.89,
      "learning_rate": 4.887869822485207e-05,
      "loss": 0.7381,
      "step": 55500
    },
    {
      "epoch": 10.99,
      "learning_rate": 4.8866370808678504e-05,
      "loss": 0.7369,
      "step": 56000
    },
    {
      "epoch": 11.09,
      "learning_rate": 4.885406804733728e-05,
      "loss": 0.7321,
      "step": 56500
    },
    {
      "epoch": 11.19,
      "learning_rate": 4.8841740631163714e-05,
      "loss": 0.7293,
      "step": 57000
    },
    {
      "epoch": 11.29,
      "learning_rate": 4.882941321499014e-05,
      "loss": 0.7258,
      "step": 57500
    },
    {
      "epoch": 11.38,
      "learning_rate": 4.881708579881657e-05,
      "loss": 0.7243,
      "step": 58000
    },
    {
      "epoch": 11.48,
      "learning_rate": 4.8804758382643e-05,
      "loss": 0.7207,
      "step": 58500
    },
    {
      "epoch": 11.58,
      "learning_rate": 4.879243096646943e-05,
      "loss": 0.716,
      "step": 59000
    },
    {
      "epoch": 11.68,
      "learning_rate": 4.878010355029586e-05,
      "loss": 0.7123,
      "step": 59500
    },
    {
      "epoch": 11.78,
      "learning_rate": 4.876777613412229e-05,
      "loss": 0.7106,
      "step": 60000
    },
    {
      "epoch": 11.87,
      "learning_rate": 4.875544871794872e-05,
      "loss": 0.7086,
      "step": 60500
    },
    {
      "epoch": 11.97,
      "learning_rate": 4.8743121301775154e-05,
      "loss": 0.7058,
      "step": 61000
    },
    {
      "epoch": 12.07,
      "learning_rate": 4.873084319526628e-05,
      "loss": 0.7038,
      "step": 61500
    },
    {
      "epoch": 12.17,
      "learning_rate": 4.8718515779092704e-05,
      "loss": 0.7001,
      "step": 62000
    },
    {
      "epoch": 12.27,
      "learning_rate": 4.870618836291914e-05,
      "loss": 0.6989,
      "step": 62500
    },
    {
      "epoch": 12.37,
      "learning_rate": 4.869386094674556e-05,
      "loss": 0.6932,
      "step": 63000
    },
    {
      "epoch": 12.46,
      "learning_rate": 4.8681533530571994e-05,
      "loss": 0.6922,
      "step": 63500
    },
    {
      "epoch": 12.56,
      "learning_rate": 4.866920611439843e-05,
      "loss": 0.6894,
      "step": 64000
    },
    {
      "epoch": 12.66,
      "learning_rate": 4.8656878698224855e-05,
      "loss": 0.6862,
      "step": 64500
    },
    {
      "epoch": 12.76,
      "learning_rate": 4.864455128205128e-05,
      "loss": 0.6846,
      "step": 65000
    },
    {
      "epoch": 12.86,
      "learning_rate": 4.863224852071006e-05,
      "loss": 0.6832,
      "step": 65500
    },
    {
      "epoch": 12.95,
      "learning_rate": 4.861994575936884e-05,
      "loss": 0.6807,
      "step": 66000
    },
    {
      "epoch": 13.05,
      "learning_rate": 4.8607642998027616e-05,
      "loss": 0.678,
      "step": 66500
    },
    {
      "epoch": 13.15,
      "learning_rate": 4.859531558185404e-05,
      "loss": 0.6761,
      "step": 67000
    },
    {
      "epoch": 13.25,
      "learning_rate": 4.858298816568048e-05,
      "loss": 0.6738,
      "step": 67500
    },
    {
      "epoch": 13.35,
      "learning_rate": 4.8570660749506905e-05,
      "loss": 0.6713,
      "step": 68000
    },
    {
      "epoch": 13.44,
      "learning_rate": 4.855833333333333e-05,
      "loss": 0.6697,
      "step": 68500
    },
    {
      "epoch": 13.54,
      "learning_rate": 4.8546005917159767e-05,
      "loss": 0.6667,
      "step": 69000
    },
    {
      "epoch": 13.64,
      "learning_rate": 4.8533678500986194e-05,
      "loss": 0.6653,
      "step": 69500
    },
    {
      "epoch": 13.74,
      "learning_rate": 4.852135108481263e-05,
      "loss": 0.6634,
      "step": 70000
    },
    {
      "epoch": 13.84,
      "learning_rate": 4.8509023668639056e-05,
      "loss": 0.6603,
      "step": 70500
    },
    {
      "epoch": 13.94,
      "learning_rate": 4.849669625246548e-05,
      "loss": 0.6591,
      "step": 71000
    },
    {
      "epoch": 14.03,
      "learning_rate": 4.8484393491124265e-05,
      "loss": 0.6573,
      "step": 71500
    },
    {
      "epoch": 14.13,
      "learning_rate": 4.847206607495069e-05,
      "loss": 0.6543,
      "step": 72000
    },
    {
      "epoch": 14.23,
      "learning_rate": 4.845973865877712e-05,
      "loss": 0.6525,
      "step": 72500
    },
    {
      "epoch": 14.33,
      "learning_rate": 4.8447411242603555e-05,
      "loss": 0.6518,
      "step": 73000
    },
    {
      "epoch": 14.43,
      "learning_rate": 4.843508382642998e-05,
      "loss": 0.6494,
      "step": 73500
    },
    {
      "epoch": 14.52,
      "learning_rate": 4.8422756410256416e-05,
      "loss": 0.6478,
      "step": 74000
    },
    {
      "epoch": 14.62,
      "learning_rate": 4.841042899408284e-05,
      "loss": 0.646,
      "step": 74500
    },
    {
      "epoch": 14.72,
      "learning_rate": 4.839810157790927e-05,
      "loss": 0.6438,
      "step": 75000
    },
    {
      "epoch": 14.82,
      "learning_rate": 4.8385774161735706e-05,
      "loss": 0.6421,
      "step": 75500
    },
    {
      "epoch": 14.92,
      "learning_rate": 4.837349605522682e-05,
      "loss": 0.6417,
      "step": 76000
    },
    {
      "epoch": 15.01,
      "learning_rate": 4.8361168639053256e-05,
      "loss": 0.6416,
      "step": 76500
    },
    {
      "epoch": 15.11,
      "learning_rate": 4.834884122287969e-05,
      "loss": 0.637,
      "step": 77000
    },
    {
      "epoch": 15.21,
      "learning_rate": 4.833651380670612e-05,
      "loss": 0.6362,
      "step": 77500
    },
    {
      "epoch": 15.31,
      "learning_rate": 4.8324186390532545e-05,
      "loss": 0.6355,
      "step": 78000
    },
    {
      "epoch": 15.41,
      "learning_rate": 4.831185897435897e-05,
      "loss": 0.6336,
      "step": 78500
    },
    {
      "epoch": 15.51,
      "learning_rate": 4.829953155818541e-05,
      "loss": 0.6304,
      "step": 79000
    },
    {
      "epoch": 15.6,
      "learning_rate": 4.828720414201184e-05,
      "loss": 0.6306,
      "step": 79500
    },
    {
      "epoch": 15.7,
      "learning_rate": 4.827487672583826e-05,
      "loss": 0.6271,
      "step": 80000
    },
    {
      "epoch": 15.8,
      "learning_rate": 4.8262573964497044e-05,
      "loss": 0.6278,
      "step": 80500
    },
    {
      "epoch": 15.9,
      "learning_rate": 4.825024654832347e-05,
      "loss": 0.6245,
      "step": 81000
    },
    {
      "epoch": 16.0,
      "learning_rate": 4.8237919132149906e-05,
      "loss": 0.6236,
      "step": 81500
    },
    {
      "epoch": 16.09,
      "learning_rate": 4.822561637080868e-05,
      "loss": 0.622,
      "step": 82000
    },
    {
      "epoch": 16.19,
      "learning_rate": 4.821328895463511e-05,
      "loss": 0.6209,
      "step": 82500
    },
    {
      "epoch": 16.29,
      "learning_rate": 4.820096153846154e-05,
      "loss": 0.6187,
      "step": 83000
    },
    {
      "epoch": 16.39,
      "learning_rate": 4.818863412228797e-05,
      "loss": 0.6189,
      "step": 83500
    },
    {
      "epoch": 16.49,
      "learning_rate": 4.8176331360946746e-05,
      "loss": 0.6159,
      "step": 84000
    },
    {
      "epoch": 16.58,
      "learning_rate": 4.816400394477318e-05,
      "loss": 0.615,
      "step": 84500
    },
    {
      "epoch": 16.68,
      "learning_rate": 4.815167652859961e-05,
      "loss": 0.6155,
      "step": 85000
    },
    {
      "epoch": 16.78,
      "learning_rate": 4.8139349112426035e-05,
      "loss": 0.6121,
      "step": 85500
    },
    {
      "epoch": 16.88,
      "learning_rate": 4.812702169625247e-05,
      "loss": 0.6115,
      "step": 86000
    },
    {
      "epoch": 16.98,
      "learning_rate": 4.8114718934911245e-05,
      "loss": 0.6113,
      "step": 86500
    }
  ],
  "max_steps": 2038000,
  "num_train_epochs": 400,
  "total_flos": 2.3340589932538233e+19,
  "trial_name": null,
  "trial_params": null
}
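This file matches the schema that the Hugging Face Transformers Trainer writes as trainer_state.json inside each checkpoint directory. As a minimal sketch of how such a state file can be consumed, the Python below loads it and plots training loss against global step; the file path and the use of matplotlib are assumptions, not part of the original artifact.

```python
import json

import matplotlib.pyplot as plt

# Assumed location: trainer_state.json as saved by the Transformers Trainer
# in a checkpoint directory (e.g. checkpoint-86615/trainer_state.json).
with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry recorded during training carries
# "epoch", "learning_rate", "loss", and "step".
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]

plt.plot(steps, losses)
plt.xlabel("global step")
plt.ylabel("training loss")
plt.title(
    f"Training loss over {state['epoch']:.1f} epochs "
    f"({state['global_step']} of {state['max_steps']} steps)"
)
plt.show()
```

With this state file, the plot would show the warmup phase (learning rate ramping to ~5e-05 over the first 10,000 steps), the plateau near loss 5.3, and the sharp drop that begins around step 22,000.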