{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "global_step": 97022,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "learning_rate": 1.25e-06,
      "loss": 0.3381,
      "step": 500
    },
    {
      "epoch": 0.02,
      "learning_rate": 2.5e-06,
      "loss": 0.1717,
      "step": 1000
    },
    {
      "epoch": 0.03,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.1167,
      "step": 1500
    },
    {
      "epoch": 0.04,
      "learning_rate": 5e-06,
      "loss": 0.0962,
      "step": 2000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.9896073662987675e-06,
      "loss": 0.0848,
      "step": 2500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.979214732597535e-06,
      "loss": 0.0796,
      "step": 3000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.968822098896302e-06,
      "loss": 0.0758,
      "step": 3500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.95842946519507e-06,
      "loss": 0.0732,
      "step": 4000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.948036831493837e-06,
      "loss": 0.071,
      "step": 4500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.937644197792605e-06,
      "loss": 0.0697,
      "step": 5000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.927251564091372e-06,
      "loss": 0.0681,
      "step": 5500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.91685893039014e-06,
      "loss": 0.0678,
      "step": 6000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.906466296688907e-06,
      "loss": 0.0656,
      "step": 6500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.896073662987675e-06,
      "loss": 0.0651,
      "step": 7000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.885681029286442e-06,
      "loss": 0.0644,
      "step": 7500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.87528839558521e-06,
      "loss": 0.0638,
      "step": 8000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.864895761883977e-06,
      "loss": 0.0627,
      "step": 8500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.854503128182744e-06,
      "loss": 0.0622,
      "step": 9000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.844110494481512e-06,
      "loss": 0.0615,
      "step": 9500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.833717860780279e-06,
      "loss": 0.061,
      "step": 10000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.823325227079046e-06,
      "loss": 0.0607,
      "step": 10500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.812932593377814e-06,
      "loss": 0.0603,
      "step": 11000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.802539959676581e-06,
      "loss": 0.0597,
      "step": 11500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.792147325975349e-06,
      "loss": 0.0593,
      "step": 12000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.781754692274117e-06,
      "loss": 0.0588,
      "step": 12500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.771362058572884e-06,
      "loss": 0.0587,
      "step": 13000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.760969424871652e-06,
      "loss": 0.058,
      "step": 13500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.750576791170419e-06,
      "loss": 0.0578,
      "step": 14000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.740184157469187e-06,
      "loss": 0.0574,
      "step": 14500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.729791523767954e-06,
      "loss": 0.0572,
      "step": 15000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.719398890066721e-06,
      "loss": 0.0566,
      "step": 15500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.709006256365489e-06,
      "loss": 0.0566,
      "step": 16000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.698613622664256e-06,
      "loss": 0.0562,
      "step": 16500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.688220988963023e-06,
      "loss": 0.0555,
      "step": 17000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.677828355261791e-06,
      "loss": 0.0554,
      "step": 17500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.667435721560558e-06,
      "loss": 0.0559,
      "step": 18000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.657043087859326e-06,
      "loss": 0.0553,
      "step": 18500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.646650454158093e-06,
      "loss": 0.0548,
      "step": 19000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.636257820456861e-06,
      "loss": 0.0548,
      "step": 19500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.6258651867556285e-06,
      "loss": 0.0544,
      "step": 20000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.6154725530543956e-06,
      "loss": 0.0542,
      "step": 20500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.6050799193531635e-06,
      "loss": 0.0542,
      "step": 21000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.5946872856519305e-06,
      "loss": 0.0539,
      "step": 21500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.5842946519506976e-06,
      "loss": 0.0538,
      "step": 22000
    },
    {
      "epoch": 0.46,
      "learning_rate": 4.5739020182494655e-06,
      "loss": 0.0535,
      "step": 22500
    },
    {
      "epoch": 0.47,
      "learning_rate": 4.5635093845482325e-06,
      "loss": 0.0534,
      "step": 23000
    },
    {
      "epoch": 0.48,
      "learning_rate": 4.5531167508469996e-06,
      "loss": 0.0535,
      "step": 23500
    },
    {
      "epoch": 0.49,
      "learning_rate": 4.5427241171457675e-06,
      "loss": 0.0533,
      "step": 24000
    },
    {
      "epoch": 0.51,
      "learning_rate": 4.5323314834445345e-06,
      "loss": 0.053,
      "step": 24500
    },
    {
      "epoch": 0.52,
      "learning_rate": 4.521938849743302e-06,
      "loss": 0.0529,
      "step": 25000
    },
    {
      "epoch": 0.53,
      "learning_rate": 4.5115462160420694e-06,
      "loss": 0.0527,
      "step": 25500
    },
    {
      "epoch": 0.54,
      "learning_rate": 4.501153582340837e-06,
      "loss": 0.0524,
      "step": 26000
    },
    {
      "epoch": 0.55,
      "learning_rate": 4.490760948639604e-06,
      "loss": 0.052,
      "step": 26500
    },
    {
      "epoch": 0.56,
      "learning_rate": 4.480368314938372e-06,
      "loss": 0.0522,
      "step": 27000
    },
    {
      "epoch": 0.57,
      "learning_rate": 4.469975681237139e-06,
      "loss": 0.0522,
      "step": 27500
    },
    {
      "epoch": 0.58,
      "learning_rate": 4.459583047535907e-06,
      "loss": 0.0519,
      "step": 28000
    },
    {
      "epoch": 0.59,
      "learning_rate": 4.449190413834674e-06,
      "loss": 0.0521,
      "step": 28500
    },
    {
      "epoch": 0.6,
      "learning_rate": 4.438797780133442e-06,
      "loss": 0.0518,
      "step": 29000
    },
    {
      "epoch": 0.61,
      "learning_rate": 4.428405146432209e-06,
      "loss": 0.0515,
      "step": 29500
    },
    {
      "epoch": 0.62,
      "learning_rate": 4.418012512730976e-06,
      "loss": 0.0514,
      "step": 30000
    },
    {
      "epoch": 0.63,
      "learning_rate": 4.407619879029744e-06,
      "loss": 0.0511,
      "step": 30500
    },
    {
      "epoch": 0.64,
      "learning_rate": 4.397227245328511e-06,
      "loss": 0.0512,
      "step": 31000
    },
    {
      "epoch": 0.65,
      "learning_rate": 4.386834611627279e-06,
      "loss": 0.0509,
      "step": 31500
    },
    {
      "epoch": 0.66,
      "learning_rate": 4.376441977926046e-06,
      "loss": 0.0513,
      "step": 32000
    },
    {
      "epoch": 0.67,
      "learning_rate": 4.366049344224814e-06,
      "loss": 0.0507,
      "step": 32500
    },
    {
      "epoch": 0.68,
      "learning_rate": 4.355656710523581e-06,
      "loss": 0.0506,
      "step": 33000
    },
    {
      "epoch": 0.69,
      "learning_rate": 4.345264076822349e-06,
      "loss": 0.0503,
      "step": 33500
    },
    {
      "epoch": 0.7,
      "learning_rate": 4.334871443121116e-06,
      "loss": 0.0501,
      "step": 34000
    },
    {
      "epoch": 0.71,
      "learning_rate": 4.324478809419884e-06,
      "loss": 0.0502,
      "step": 34500
    },
    {
      "epoch": 0.72,
      "learning_rate": 4.314086175718651e-06,
      "loss": 0.0505,
      "step": 35000
    },
    {
      "epoch": 0.73,
      "learning_rate": 4.303693542017419e-06,
      "loss": 0.0505,
      "step": 35500
    },
    {
      "epoch": 0.74,
      "learning_rate": 4.293300908316186e-06,
      "loss": 0.0501,
      "step": 36000
    },
    {
      "epoch": 0.75,
      "learning_rate": 4.282908274614953e-06,
      "loss": 0.05,
      "step": 36500
    },
    {
      "epoch": 0.76,
      "learning_rate": 4.272515640913721e-06,
      "loss": 0.05,
      "step": 37000
    },
    {
      "epoch": 0.77,
      "learning_rate": 4.262123007212488e-06,
      "loss": 0.0498,
      "step": 37500
    },
    {
      "epoch": 0.78,
      "learning_rate": 4.251730373511256e-06,
      "loss": 0.0498,
      "step": 38000
    },
    {
      "epoch": 0.79,
      "learning_rate": 4.241337739810023e-06,
      "loss": 0.0496,
      "step": 38500
    },
    {
      "epoch": 0.8,
      "learning_rate": 4.23094510610879e-06,
      "loss": 0.0496,
      "step": 39000
    },
    {
      "epoch": 0.81,
      "learning_rate": 4.220552472407558e-06,
      "loss": 0.0493,
      "step": 39500
    },
    {
      "epoch": 0.82,
      "learning_rate": 4.210159838706326e-06,
      "loss": 0.0495,
      "step": 40000
    },
    {
      "epoch": 0.83,
      "learning_rate": 4.199767205005093e-06,
      "loss": 0.049,
      "step": 40500
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.189374571303861e-06,
      "loss": 0.0493,
      "step": 41000
    },
    {
      "epoch": 0.86,
      "learning_rate": 4.178981937602628e-06,
      "loss": 0.0491,
      "step": 41500
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.1685893039013956e-06,
      "loss": 0.0492,
      "step": 42000
    },
    {
      "epoch": 0.88,
      "learning_rate": 4.158196670200163e-06,
      "loss": 0.0492,
      "step": 42500
    },
    {
      "epoch": 0.89,
      "learning_rate": 4.14780403649893e-06,
      "loss": 0.0487,
      "step": 43000
    },
    {
      "epoch": 0.9,
      "learning_rate": 4.1374114027976976e-06,
      "loss": 0.049,
      "step": 43500
    },
    {
      "epoch": 0.91,
      "learning_rate": 4.127018769096465e-06,
      "loss": 0.0487,
      "step": 44000
    },
    {
      "epoch": 0.92,
      "learning_rate": 4.116626135395232e-06,
      "loss": 0.0489,
      "step": 44500
    },
    {
      "epoch": 0.93,
      "learning_rate": 4.1062335016939995e-06,
      "loss": 0.0485,
      "step": 45000
    },
    {
      "epoch": 0.94,
      "learning_rate": 4.095840867992767e-06,
      "loss": 0.0486,
      "step": 45500
    },
    {
      "epoch": 0.95,
      "learning_rate": 4.0854482342915345e-06,
      "loss": 0.0485,
      "step": 46000
    },
    {
      "epoch": 0.96,
      "learning_rate": 4.0750556005903015e-06,
      "loss": 0.0487,
      "step": 46500
    },
    {
      "epoch": 0.97,
      "learning_rate": 4.0646629668890694e-06,
      "loss": 0.0482,
      "step": 47000
    },
    {
      "epoch": 0.98,
      "learning_rate": 4.054270333187837e-06,
      "loss": 0.0482,
      "step": 47500
    },
    {
      "epoch": 0.99,
      "learning_rate": 4.043877699486604e-06,
      "loss": 0.0481,
      "step": 48000
    },
    {
      "epoch": 1.0,
      "learning_rate": 4.033485065785372e-06,
      "loss": 0.0481,
      "step": 48500
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.04874278977513313,
      "eval_runtime": 1479.0093,
      "eval_samples_per_second": 13.382,
      "eval_steps_per_second": 0.836,
      "step": 48511
    },
    {
      "epoch": 1.01,
      "learning_rate": 4.023092432084139e-06,
      "loss": 0.0479,
      "step": 49000
    },
    {
      "epoch": 1.02,
      "learning_rate": 4.012699798382906e-06,
      "loss": 0.0478,
      "step": 49500
    },
    {
      "epoch": 1.03,
      "learning_rate": 4.002307164681674e-06,
      "loss": 0.048,
      "step": 50000
    },
    {
      "epoch": 1.04,
      "learning_rate": 3.991914530980441e-06,
      "loss": 0.0477,
      "step": 50500
    },
    {
      "epoch": 1.05,
      "learning_rate": 3.981521897279208e-06,
      "loss": 0.0478,
      "step": 51000
    },
    {
      "epoch": 1.06,
      "learning_rate": 3.971129263577976e-06,
      "loss": 0.0476,
      "step": 51500
    },
    {
      "epoch": 1.07,
      "learning_rate": 3.960736629876743e-06,
      "loss": 0.0475,
      "step": 52000
    },
    {
      "epoch": 1.08,
      "learning_rate": 3.950343996175511e-06,
      "loss": 0.0473,
      "step": 52500
    },
    {
      "epoch": 1.09,
      "learning_rate": 3.939951362474278e-06,
      "loss": 0.0477,
      "step": 53000
    },
    {
      "epoch": 1.1,
      "learning_rate": 3.929558728773046e-06,
      "loss": 0.0472,
      "step": 53500
    },
    {
      "epoch": 1.11,
      "learning_rate": 3.919166095071813e-06,
      "loss": 0.0471,
      "step": 54000
    },
    {
      "epoch": 1.12,
      "learning_rate": 3.908773461370581e-06,
      "loss": 0.0472,
      "step": 54500
    },
    {
      "epoch": 1.13,
      "learning_rate": 3.898380827669349e-06,
      "loss": 0.0472,
      "step": 55000
    },
    {
      "epoch": 1.14,
      "learning_rate": 3.887988193968116e-06,
      "loss": 0.0471,
      "step": 55500
    },
    {
      "epoch": 1.15,
      "learning_rate": 3.877595560266883e-06,
      "loss": 0.0472,
      "step": 56000
    },
    {
      "epoch": 1.16,
      "learning_rate": 3.867202926565651e-06,
      "loss": 0.0471,
      "step": 56500
    },
    {
      "epoch": 1.17,
      "learning_rate": 3.856810292864418e-06,
      "loss": 0.047,
      "step": 57000
    },
    {
      "epoch": 1.19,
      "learning_rate": 3.846417659163185e-06,
      "loss": 0.0472,
      "step": 57500
    },
    {
      "epoch": 1.2,
      "learning_rate": 3.836025025461953e-06,
      "loss": 0.0471,
      "step": 58000
    },
    {
      "epoch": 1.21,
      "learning_rate": 3.82563239176072e-06,
      "loss": 0.0468,
      "step": 58500
    },
    {
      "epoch": 1.22,
      "learning_rate": 3.815239758059488e-06,
      "loss": 0.047,
      "step": 59000
    },
    {
      "epoch": 1.23,
      "learning_rate": 3.8048471243582554e-06,
      "loss": 0.0467,
      "step": 59500
    },
    {
      "epoch": 1.24,
      "learning_rate": 3.7944544906570224e-06,
      "loss": 0.0465,
      "step": 60000
    },
    {
      "epoch": 1.25,
      "learning_rate": 3.7840618569557903e-06,
      "loss": 0.0467,
      "step": 60500
    },
    {
      "epoch": 1.26,
      "learning_rate": 3.7736692232545574e-06,
      "loss": 0.0465,
      "step": 61000
    },
    {
      "epoch": 1.27,
      "learning_rate": 3.763276589553325e-06,
      "loss": 0.0467,
      "step": 61500
    },
    {
      "epoch": 1.28,
      "learning_rate": 3.7528839558520923e-06,
      "loss": 0.0468,
      "step": 62000
    },
    {
      "epoch": 1.29,
      "learning_rate": 3.7424913221508598e-06,
      "loss": 0.0469,
      "step": 62500
    },
    {
      "epoch": 1.3,
      "learning_rate": 3.7320986884496272e-06,
      "loss": 0.0465,
      "step": 63000
    },
    {
      "epoch": 1.31,
      "learning_rate": 3.7217060547483947e-06,
      "loss": 0.0462,
      "step": 63500
    },
    {
      "epoch": 1.32,
      "learning_rate": 3.7113134210471618e-06,
      "loss": 0.0467,
      "step": 64000
    },
    {
      "epoch": 1.33,
      "learning_rate": 3.7009207873459297e-06,
      "loss": 0.0465,
      "step": 64500
    },
    {
      "epoch": 1.34,
      "learning_rate": 3.6905281536446967e-06,
      "loss": 0.0464,
      "step": 65000
    },
    {
      "epoch": 1.35,
      "learning_rate": 3.6801355199434646e-06,
      "loss": 0.0465,
      "step": 65500
    },
    {
      "epoch": 1.36,
      "learning_rate": 3.6697428862422316e-06,
      "loss": 0.0465,
      "step": 66000
    },
    {
      "epoch": 1.37,
      "learning_rate": 3.659350252540999e-06,
      "loss": 0.0463,
      "step": 66500
    },
    {
      "epoch": 1.38,
      "learning_rate": 3.648957618839767e-06,
      "loss": 0.0461,
      "step": 67000
    },
    {
      "epoch": 1.39,
      "learning_rate": 3.638564985138534e-06,
      "loss": 0.0461,
      "step": 67500
    },
    {
      "epoch": 1.4,
      "learning_rate": 3.628172351437301e-06,
      "loss": 0.0462,
      "step": 68000
    },
    {
      "epoch": 1.41,
      "learning_rate": 3.617779717736069e-06,
      "loss": 0.0459,
      "step": 68500
    },
    {
      "epoch": 1.42,
      "learning_rate": 3.6073870840348365e-06,
      "loss": 0.0462,
      "step": 69000
    },
    {
      "epoch": 1.43,
      "learning_rate": 3.596994450333604e-06,
      "loss": 0.0459,
      "step": 69500
    },
    {
      "epoch": 1.44,
      "learning_rate": 3.5866018166323714e-06,
      "loss": 0.0459,
      "step": 70000
    },
    {
      "epoch": 1.45,
      "learning_rate": 3.5762091829311385e-06,
      "loss": 0.0457,
      "step": 70500
    },
    {
      "epoch": 1.46,
      "learning_rate": 3.5658165492299064e-06,
      "loss": 0.0457,
      "step": 71000
    },
    {
      "epoch": 1.47,
      "learning_rate": 3.5554239155286734e-06,
      "loss": 0.0456,
      "step": 71500
    },
    {
      "epoch": 1.48,
      "learning_rate": 3.5450312818274413e-06,
      "loss": 0.0457,
      "step": 72000
    },
    {
      "epoch": 1.49,
      "learning_rate": 3.5346386481262083e-06,
      "loss": 0.0461,
      "step": 72500
    },
    {
      "epoch": 1.5,
      "learning_rate": 3.524246014424976e-06,
      "loss": 0.046,
      "step": 73000
    },
    {
      "epoch": 1.52,
      "learning_rate": 3.5138533807237433e-06,
      "loss": 0.0456,
      "step": 73500
    },
    {
      "epoch": 1.53,
      "learning_rate": 3.5034607470225108e-06,
      "loss": 0.0456,
      "step": 74000
    },
    {
      "epoch": 1.54,
      "learning_rate": 3.493068113321278e-06,
      "loss": 0.0458,
      "step": 74500
    },
    {
      "epoch": 1.55,
      "learning_rate": 3.4826754796200457e-06,
      "loss": 0.0455,
      "step": 75000
    },
    {
      "epoch": 1.56,
      "learning_rate": 3.4722828459188128e-06,
      "loss": 0.0458,
      "step": 75500
    },
    {
      "epoch": 1.57,
      "learning_rate": 3.4618902122175806e-06,
      "loss": 0.0455,
      "step": 76000
    },
    {
      "epoch": 1.58,
      "learning_rate": 3.451497578516348e-06,
      "loss": 0.0459,
      "step": 76500
    },
    {
      "epoch": 1.59,
      "learning_rate": 3.441104944815115e-06,
      "loss": 0.0455,
      "step": 77000
    },
    {
      "epoch": 1.6,
      "learning_rate": 3.430712311113883e-06,
      "loss": 0.0457,
      "step": 77500
    },
    {
      "epoch": 1.61,
      "learning_rate": 3.42031967741265e-06,
      "loss": 0.0456,
      "step": 78000
    },
    {
      "epoch": 1.62,
      "learning_rate": 3.409927043711417e-06,
      "loss": 0.0453,
      "step": 78500
    },
    {
      "epoch": 1.63,
      "learning_rate": 3.399534410010185e-06,
      "loss": 0.0454,
      "step": 79000
    },
    {
      "epoch": 1.64,
      "learning_rate": 3.3891417763089525e-06,
      "loss": 0.0453,
      "step": 79500
    },
    {
      "epoch": 1.65,
      "learning_rate": 3.37874914260772e-06,
      "loss": 0.0455,
      "step": 80000
    },
    {
      "epoch": 1.66,
      "learning_rate": 3.3683565089064875e-06,
      "loss": 0.0453,
      "step": 80500
    },
    {
      "epoch": 1.67,
      "learning_rate": 3.3579638752052545e-06,
      "loss": 0.0455,
      "step": 81000
    },
    {
      "epoch": 1.68,
      "learning_rate": 3.3475712415040224e-06,
      "loss": 0.0451,
      "step": 81500
    },
    {
      "epoch": 1.69,
      "learning_rate": 3.3371786078027895e-06,
      "loss": 0.0448,
      "step": 82000
    },
    {
      "epoch": 1.7,
      "learning_rate": 3.3267859741015574e-06,
      "loss": 0.0453,
      "step": 82500
    },
    {
      "epoch": 1.71,
      "learning_rate": 3.3163933404003244e-06,
      "loss": 0.045,
      "step": 83000
    },
    {
      "epoch": 1.72,
      "learning_rate": 3.306000706699092e-06,
      "loss": 0.045,
      "step": 83500
    },
    {
      "epoch": 1.73,
      "learning_rate": 3.2956080729978598e-06,
      "loss": 0.0453,
      "step": 84000
    },
    {
      "epoch": 1.74,
      "learning_rate": 3.285215439296627e-06,
      "loss": 0.045,
      "step": 84500
    },
    {
      "epoch": 1.75,
      "learning_rate": 3.274822805595394e-06,
      "loss": 0.0454,
      "step": 85000
    },
    {
      "epoch": 1.76,
      "learning_rate": 3.2644301718941618e-06,
      "loss": 0.045,
      "step": 85500
    },
    {
      "epoch": 1.77,
      "learning_rate": 3.254037538192929e-06,
      "loss": 0.0449,
      "step": 86000
    },
    {
      "epoch": 1.78,
      "learning_rate": 3.2436449044916967e-06,
      "loss": 0.045,
      "step": 86500
    },
    {
      "epoch": 1.79,
      "learning_rate": 3.233252270790464e-06,
      "loss": 0.0448,
      "step": 87000
    },
    {
      "epoch": 1.8,
      "learning_rate": 3.2228596370892312e-06,
      "loss": 0.0449,
      "step": 87500
    },
    {
      "epoch": 1.81,
      "learning_rate": 3.212467003387999e-06,
      "loss": 0.0446,
      "step": 88000
    },
    {
      "epoch": 1.82,
      "learning_rate": 3.202074369686766e-06,
      "loss": 0.045,
      "step": 88500
    },
    {
      "epoch": 1.83,
      "learning_rate": 3.191681735985534e-06,
      "loss": 0.0449,
      "step": 89000
    },
    {
      "epoch": 1.84,
      "learning_rate": 3.181289102284301e-06,
      "loss": 0.0446,
      "step": 89500
    },
    {
      "epoch": 1.86,
      "learning_rate": 3.1708964685830686e-06,
      "loss": 0.0445,
      "step": 90000
    },
    {
      "epoch": 1.87,
      "learning_rate": 3.160503834881836e-06,
      "loss": 0.0448,
      "step": 90500
    },
    {
      "epoch": 1.88,
      "learning_rate": 3.1501112011806035e-06,
      "loss": 0.0445,
      "step": 91000
    },
    {
      "epoch": 1.89,
      "learning_rate": 3.1397185674793706e-06,
      "loss": 0.0447,
      "step": 91500
    },
    {
      "epoch": 1.9,
      "learning_rate": 3.1293259337781385e-06,
      "loss": 0.0446,
      "step": 92000
    },
    {
      "epoch": 1.91,
      "learning_rate": 3.1189333000769055e-06,
      "loss": 0.0449,
      "step": 92500
    },
    {
      "epoch": 1.92,
      "learning_rate": 3.1085406663756734e-06,
      "loss": 0.0446,
      "step": 93000
    },
    {
      "epoch": 1.93,
      "learning_rate": 3.0981480326744404e-06,
      "loss": 0.0442,
      "step": 93500
    },
    {
      "epoch": 1.94,
      "learning_rate": 3.087755398973208e-06,
      "loss": 0.0445,
      "step": 94000
    },
    {
      "epoch": 1.95,
      "learning_rate": 3.077362765271976e-06,
      "loss": 0.0444,
      "step": 94500
    },
    {
      "epoch": 1.96,
      "learning_rate": 3.066970131570743e-06,
      "loss": 0.0444,
      "step": 95000
    },
    {
      "epoch": 1.97,
      "learning_rate": 3.05657749786951e-06,
      "loss": 0.0444,
      "step": 95500
    },
    {
      "epoch": 1.98,
      "learning_rate": 3.046184864168278e-06,
      "loss": 0.0444,
      "step": 96000
    },
    {
      "epoch": 1.99,
      "learning_rate": 3.0357922304670453e-06,
      "loss": 0.0446,
      "step": 96500
    },
    {
      "epoch": 2.0,
      "learning_rate": 3.0253995967658127e-06,
      "loss": 0.0443,
      "step": 97000
    },
    {
      "epoch": 2.0,
      "eval_loss": 0.04501689225435257,
      "eval_runtime": 1478.7113,
      "eval_samples_per_second": 13.385,
      "eval_steps_per_second": 0.837,
      "step": 97022
    }
  ],
  "max_steps": 242555,
  "num_train_epochs": 5,
  "total_flos": 1.1940942833642373e+18,
  "trial_name": null,
  "trial_params": null
}