{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5623226520866496,
  "eval_steps": 100000000,
  "global_step": 78000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 10.7586,
      "step": 1
    },
    {
      "epoch": 0.0,
      "learning_rate": 5e-05,
      "loss": 6.5292,
      "step": 500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.99998214600225e-05,
      "loss": 4.7262,
      "step": 1000
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.9999285842640105e-05,
      "loss": 4.1997,
      "step": 1500
    },
    {
      "epoch": 0.01,
      "learning_rate": 4.999839315550315e-05,
      "loss": 3.8892,
      "step": 2000
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9997143411362066e-05,
      "loss": 3.6618,
      "step": 2500
    },
    {
      "epoch": 0.02,
      "learning_rate": 4.9995536628067196e-05,
      "loss": 3.5132,
      "step": 3000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.9993572828568536e-05,
      "loss": 3.4092,
      "step": 3500
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.999125204091544e-05,
      "loss": 3.3295,
      "step": 4000
    },
    {
      "epoch": 0.03,
      "learning_rate": 4.998857429825615e-05,
      "loss": 3.2596,
      "step": 4500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.9985539638837424e-05,
      "loss": 3.2068,
      "step": 5000
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.998214810600389e-05,
      "loss": 3.163,
      "step": 5500
    },
    {
      "epoch": 0.04,
      "learning_rate": 4.997839974819749e-05,
      "loss": 3.1256,
      "step": 6000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.997429461895675e-05,
      "loss": 3.0844,
      "step": 6500
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996983277691606e-05,
      "loss": 3.0541,
      "step": 7000
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.996501428580478e-05,
      "loss": 3.0293,
      "step": 7500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.995983921444638e-05,
      "loss": 2.999,
      "step": 8000
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.9954307636757434e-05,
      "loss": 2.9744,
      "step": 8500
    },
    {
      "epoch": 0.06,
      "learning_rate": 4.994841963174656e-05,
      "loss": 2.9525,
      "step": 9000
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.9942175283513294e-05,
      "loss": 2.9321,
      "step": 9500
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.993557468124691e-05,
      "loss": 2.9237,
      "step": 10000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.9928617919225106e-05,
      "loss": 2.9021,
      "step": 10500
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.99213050968127e-05,
      "loss": 2.8766,
      "step": 11000
    },
    {
      "epoch": 0.08,
      "learning_rate": 4.991363631846019e-05,
      "loss": 2.8691,
      "step": 11500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.990561169370224e-05,
      "loss": 2.8531,
      "step": 12000
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.989723133715618e-05,
      "loss": 2.8366,
      "step": 12500
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.988849536852028e-05,
      "loss": 2.8265,
      "step": 13000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.987940391257212e-05,
      "loss": 2.8099,
      "step": 13500
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.986995709916677e-05,
      "loss": 2.8009,
      "step": 14000
    },
    {
      "epoch": 0.1,
      "learning_rate": 4.9860155063234944e-05,
      "loss": 2.7881,
      "step": 14500
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9849997944781055e-05,
      "loss": 2.7794,
      "step": 15000
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.9839485888881235e-05,
      "loss": 2.7674,
      "step": 15500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.982861904568127e-05,
      "loss": 2.7559,
      "step": 16000
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.981739757039443e-05,
      "loss": 2.7455,
      "step": 16500
    },
    {
      "epoch": 0.12,
      "learning_rate": 4.9805821623299285e-05,
      "loss": 2.7351,
      "step": 17000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.979389136973737e-05,
      "loss": 2.7317,
      "step": 17500
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.978160698011085e-05,
      "loss": 2.7192,
      "step": 18000
    },
    {
      "epoch": 0.13,
      "learning_rate": 4.976896862988012e-05,
      "loss": 2.709,
      "step": 18500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.975597649956122e-05,
      "loss": 2.7043,
      "step": 19000
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.974263077472334e-05,
      "loss": 2.6954,
      "step": 19500
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.97289316459861e-05,
      "loss": 2.6905,
      "step": 20000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.9714879309016874e-05,
      "loss": 2.6782,
      "step": 20500
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.970047396452798e-05,
      "loss": 2.6751,
      "step": 21000
    },
    {
      "epoch": 0.15,
      "learning_rate": 4.968571581827381e-05,
      "loss": 2.6659,
      "step": 21500
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9670605081047886e-05,
      "loss": 2.6594,
      "step": 22000
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.9655141968679866e-05,
      "loss": 2.6553,
      "step": 22500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.963932670203245e-05,
      "loss": 2.6499,
      "step": 23000
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.962315950699822e-05,
      "loss": 2.6391,
      "step": 23500
    },
    {
      "epoch": 0.17,
      "learning_rate": 4.960664061449643e-05,
      "loss": 2.6313,
      "step": 24000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.958977026046971e-05,
      "loss": 2.6318,
      "step": 24500
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.957254868588065e-05,
      "loss": 2.6275,
      "step": 25000
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.955497613670842e-05,
      "loss": 2.6192,
      "step": 25500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.953705286394522e-05,
      "loss": 2.6153,
      "step": 26000
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9518779123592705e-05,
      "loss": 2.6102,
      "step": 26500
    },
    {
      "epoch": 0.19,
      "learning_rate": 4.9500155176658345e-05,
      "loss": 2.601,
      "step": 27000
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9481181289151655e-05,
      "loss": 2.5991,
      "step": 27500
    },
    {
      "epoch": 0.2,
      "learning_rate": 4.9461857732080436e-05,
      "loss": 2.5946,
      "step": 28000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9442184781446876e-05,
      "loss": 2.5911,
      "step": 28500
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.9422162718243626e-05,
      "loss": 2.5827,
      "step": 29000
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.94017918284498e-05,
      "loss": 2.5785,
      "step": 29500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.938107240302683e-05,
      "loss": 2.5757,
      "step": 30000
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.93600047379144e-05,
      "loss": 2.5705,
      "step": 30500
    },
    {
      "epoch": 0.22,
      "learning_rate": 4.9338589134026124e-05,
      "loss": 2.5703,
      "step": 31000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.931682589724534e-05,
      "loss": 2.5593,
      "step": 31500
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.929471533842065e-05,
      "loss": 2.5647,
      "step": 32000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.9272257773361574e-05,
      "loss": 2.5564,
      "step": 32500
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.924945352283394e-05,
      "loss": 2.5492,
      "step": 33000
    },
    {
      "epoch": 0.24,
      "learning_rate": 4.922630291255539e-05,
      "loss": 2.5509,
      "step": 33500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.920280627319067e-05,
      "loss": 2.5466,
      "step": 34000
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.917896394034696e-05,
      "loss": 2.5412,
      "step": 34500
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.915477625456899e-05,
      "loss": 2.5383,
      "step": 35000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.91302435613343e-05,
      "loss": 2.5364,
      "step": 35500
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.91053662110482e-05,
      "loss": 2.5292,
      "step": 36000
    },
    {
      "epoch": 0.26,
      "learning_rate": 4.908014455903881e-05,
      "loss": 2.5304,
      "step": 36500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.905457896555199e-05,
      "loss": 2.5257,
      "step": 37000
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.902866979574618e-05,
      "loss": 2.5189,
      "step": 37500
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.9002417419687183e-05,
      "loss": 2.522,
      "step": 38000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.897582221234289e-05,
      "loss": 2.5141,
      "step": 38500
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.8948884553577926e-05,
      "loss": 2.517,
      "step": 39000
    },
    {
      "epoch": 0.28,
      "learning_rate": 4.8921604828148206e-05,
      "loss": 2.5077,
      "step": 39500
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8893983425695445e-05,
      "loss": 2.5068,
      "step": 40000
    },
    {
      "epoch": 0.29,
      "learning_rate": 4.8866020740741626e-05,
      "loss": 2.5048,
      "step": 40500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.883771717268331e-05,
      "loss": 2.4972,
      "step": 41000
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8809073125785964e-05,
      "loss": 2.4999,
      "step": 41500
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.8780089009178206e-05,
      "loss": 2.4941,
      "step": 42000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8750765236845894e-05,
      "loss": 2.4891,
      "step": 42500
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.8721102227626304e-05,
      "loss": 2.4876,
      "step": 43000
    },
    {
      "epoch": 0.31,
      "learning_rate": 4.869110040520206e-05,
      "loss": 2.4859,
      "step": 43500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8660760198095145e-05,
      "loss": 2.4833,
      "step": 44000
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.8630082039660744e-05,
      "loss": 2.4825,
      "step": 44500
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.859906636808108e-05,
      "loss": 2.4801,
      "step": 45000
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.8567713626359134e-05,
      "loss": 2.4763,
      "step": 45500
    },
    {
      "epoch": 0.33,
      "learning_rate": 4.853602426231234e-05,
      "loss": 2.4742,
      "step": 46000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.850399872856615e-05,
      "loss": 2.4702,
      "step": 46500
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.847163748254762e-05,
      "loss": 2.4679,
      "step": 47000
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.843894098647884e-05,
      "loss": 2.4676,
      "step": 47500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.840590970737035e-05,
      "loss": 2.4679,
      "step": 48000
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8372544117014436e-05,
      "loss": 2.4644,
      "step": 48500
    },
    {
      "epoch": 0.35,
      "learning_rate": 4.8338844691978456e-05,
      "loss": 2.4622,
      "step": 49000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.830481191359797e-05,
      "loss": 2.4605,
      "step": 49500
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.82704462679699e-05,
      "loss": 2.4552,
      "step": 50000
    },
    {
      "epoch": 0.36,
      "learning_rate": 4.823574824594558e-05,
      "loss": 2.4501,
      "step": 50500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.820071834312372e-05,
      "loss": 2.4563,
      "step": 51000
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.816535705984337e-05,
      "loss": 2.4467,
      "step": 51500
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.812966490117676e-05,
      "loss": 2.4434,
      "step": 52000
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8093642376922057e-05,
      "loss": 2.4422,
      "step": 52500
    },
    {
      "epoch": 0.38,
      "learning_rate": 4.8057290001596106e-05,
      "loss": 2.4462,
      "step": 53000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.802060829442711e-05,
      "loss": 2.4424,
      "step": 53500
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.798359777934714e-05,
      "loss": 2.4373,
      "step": 54000
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.7946258984984735e-05,
      "loss": 2.4381,
      "step": 54500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.7908592444657287e-05,
      "loss": 2.4334,
      "step": 55000
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.787059869636346e-05,
      "loss": 2.4328,
      "step": 55500
    },
    {
      "epoch": 0.4,
      "learning_rate": 4.783227828277549e-05,
      "loss": 2.4284,
      "step": 56000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.779363175123144e-05,
      "loss": 2.4278,
      "step": 56500
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.7754659653727384e-05,
      "loss": 2.4231,
      "step": 57000
    },
    {
      "epoch": 0.41,
      "learning_rate": 4.771536254690951e-05,
      "loss": 2.4212,
      "step": 57500
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.767574099206619e-05,
      "loss": 2.4247,
      "step": 58000
    },
    {
      "epoch": 0.42,
      "learning_rate": 4.763579555511993e-05,
      "loss": 2.4209,
      "step": 58500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.759552680661933e-05,
      "loss": 2.4224,
      "step": 59000
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.755493532173092e-05,
      "loss": 2.4204,
      "step": 59500
    },
    {
      "epoch": 0.43,
      "learning_rate": 4.75140216802309e-05,
      "loss": 2.4201,
      "step": 60000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7472786466496944e-05,
      "loss": 2.4127,
      "step": 60500
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7431230269499764e-05,
      "loss": 2.4146,
      "step": 61000
    },
    {
      "epoch": 0.44,
      "learning_rate": 4.7389353682794765e-05,
      "loss": 2.4074,
      "step": 61500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7347157304513544e-05,
      "loss": 2.411,
      "step": 62000
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7304641737355326e-05,
      "loss": 2.411,
      "step": 62500
    },
    {
      "epoch": 0.45,
      "learning_rate": 4.7261807588578375e-05,
      "loss": 2.4068,
      "step": 63000
    },
    {
      "epoch": 0.45778831291669553,
      "grad_norm": 0.4898933470249176,
      "learning_rate": 4.721865546999135e-05,
      "loss": 2.3865,
      "step": 63500
    },
    {
      "epoch": 0.4613929453018663,
      "grad_norm": 0.5031083226203918,
      "learning_rate": 4.7175185997944485e-05,
      "loss": 2.3853,
      "step": 64000
    },
    {
      "epoch": 0.46499757768703714,
      "grad_norm": 0.5099691152572632,
      "learning_rate": 4.713139979332089e-05,
      "loss": 2.3843,
      "step": 64500
    },
    {
      "epoch": 0.468602210072208,
      "grad_norm": 0.528971791267395,
      "learning_rate": 4.708729748152759e-05,
      "loss": 2.3867,
      "step": 65000
    },
    {
      "epoch": 0.4722068424573788,
      "grad_norm": 0.5447410941123962,
      "learning_rate": 4.7042879692486655e-05,
      "loss": 2.3871,
      "step": 65500
    },
    {
      "epoch": 0.47581147484254965,
      "grad_norm": 0.5187677145004272,
      "learning_rate": 4.699814706062616e-05,
      "loss": 2.3821,
      "step": 66000
    },
    {
      "epoch": 0.4794161072277205,
      "grad_norm": 0.5079197883605957,
      "learning_rate": 4.6953100224871163e-05,
      "loss": 2.3846,
      "step": 66500
    },
    {
      "epoch": 0.4830207396128913,
      "grad_norm": 0.5204797387123108,
      "learning_rate": 4.6907739828634536e-05,
      "loss": 2.3867,
      "step": 67000
    },
    {
      "epoch": 0.48662537199806216,
      "grad_norm": 0.51589435338974,
      "learning_rate": 4.6862066519807816e-05,
      "loss": 2.3792,
      "step": 67500
    },
    {
      "epoch": 0.490230004383233,
      "grad_norm": 0.5342502593994141,
      "learning_rate": 4.681608095075192e-05,
      "loss": 2.3767,
      "step": 68000
    },
    {
      "epoch": 0.49383463676840383,
      "grad_norm": 0.5265562534332275,
      "learning_rate": 4.676978377828785e-05,
      "loss": 2.3839,
      "step": 68500
    },
    {
      "epoch": 0.4974392691535746,
      "grad_norm": 0.5328825116157532,
      "learning_rate": 4.67231756636873e-05,
      "loss": 2.3813,
      "step": 69000
    },
    {
      "epoch": 0.5010439015387455,
      "grad_norm": 0.4992935061454773,
      "learning_rate": 4.6676257272663194e-05,
      "loss": 2.377,
      "step": 69500
    },
    {
      "epoch": 0.5046485339239163,
      "grad_norm": 0.5137242674827576,
      "learning_rate": 4.662902927536022e-05,
      "loss": 2.3798,
      "step": 70000
    },
    {
      "epoch": 0.5082531663090871,
      "grad_norm": 0.5188072323799133,
      "learning_rate": 4.658149234634523e-05,
      "loss": 2.3783,
      "step": 70500
    },
    {
      "epoch": 0.511857798694258,
      "grad_norm": 0.5286343693733215,
      "learning_rate": 4.65336471645976e-05,
      "loss": 2.3719,
      "step": 71000
    },
    {
      "epoch": 0.5154624310794288,
      "grad_norm": 0.5098246335983276,
      "learning_rate": 4.648549441349953e-05,
      "loss": 2.3712,
      "step": 71500
    },
    {
      "epoch": 0.5190670634645996,
      "grad_norm": 0.528311014175415,
      "learning_rate": 4.643703478082632e-05,
      "loss": 2.37,
      "step": 72000
    },
    {
      "epoch": 0.5226716958497705,
      "grad_norm": 0.5106325745582581,
      "learning_rate": 4.638826895873651e-05,
      "loss": 2.3717,
      "step": 72500
    },
    {
      "epoch": 0.5262763282349413,
      "grad_norm": 0.5130262970924377,
      "learning_rate": 4.6339197643762e-05,
      "loss": 2.3669,
      "step": 73000
    },
    {
      "epoch": 0.5298809606201121,
      "grad_norm": 0.5350182056427002,
      "learning_rate": 4.628982153679811e-05,
      "loss": 2.3675,
      "step": 73500
    },
    {
      "epoch": 0.533485593005283,
      "grad_norm": 0.5258482098579407,
      "learning_rate": 4.6240141343093545e-05,
      "loss": 2.3673,
      "step": 74000
    },
    {
      "epoch": 0.5370902253904538,
      "grad_norm": 0.5219452977180481,
      "learning_rate": 4.6190157772240385e-05,
      "loss": 2.3644,
      "step": 74500
    },
    {
      "epoch": 0.5406948577756246,
      "grad_norm": 0.531566858291626,
      "learning_rate": 4.6139871538163856e-05,
      "loss": 2.366,
      "step": 75000
    },
    {
      "epoch": 0.5442994901607955,
      "grad_norm": 0.5164352655410767,
      "learning_rate": 4.608928335911222e-05,
      "loss": 2.3651,
      "step": 75500
    },
    {
      "epoch": 0.5479041225459663,
      "grad_norm": 0.558618426322937,
      "learning_rate": 4.603839395764646e-05,
      "loss": 2.3618,
      "step": 76000
    },
    {
      "epoch": 0.5515087549311372,
      "grad_norm": 0.5367295145988464,
      "learning_rate": 4.598720406062999e-05,
      "loss": 2.3581,
      "step": 76500
    },
    {
      "epoch": 0.5551133873163079,
      "grad_norm": 0.5240716934204102,
      "learning_rate": 4.593571439921825e-05,
      "loss": 2.3609,
      "step": 77000
    },
    {
      "epoch": 0.5587180197014787,
      "grad_norm": 0.5218440890312195,
      "learning_rate": 4.5883925708848275e-05,
      "loss": 2.3583,
      "step": 77500
    },
    {
      "epoch": 0.5623226520866496,
      "grad_norm": 0.5101985335350037,
      "learning_rate": 4.58318387292282e-05,
      "loss": 2.3587,
      "step": 78000
    }
  ],
  "logging_steps": 500,
  "max_steps": 416130,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 3000,
  "total_flos": 2.8566582460416e+19,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}