{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.999985466172516,
  "global_step": 120407,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 9.0919,
      "step": 2000
    },
    {
      "epoch": 0.23,
      "learning_rate": 4e-05,
      "loss": 7.6432,
      "step": 4000
    },
    {
      "epoch": 0.35,
      "learning_rate": 6e-05,
      "loss": 6.7199,
      "step": 6000
    },
    {
      "epoch": 0.47,
      "learning_rate": 8e-05,
      "loss": 6.1549,
      "step": 8000
    },
    {
      "epoch": 0.58,
      "learning_rate": 0.0001,
      "loss": 5.5391,
      "step": 10000
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.818852065539323e-05,
      "loss": 4.4909,
      "step": 12000
    },
    {
      "epoch": 0.81,
      "learning_rate": 9.637704131078646e-05,
      "loss": 2.7203,
      "step": 14000
    },
    {
      "epoch": 0.93,
      "learning_rate": 9.456556196617969e-05,
      "loss": 1.5493,
      "step": 16000
    },
    {
      "epoch": 1.05,
      "learning_rate": 9.275408262157291e-05,
      "loss": 1.2876,
      "step": 18000
    },
    {
      "epoch": 1.16,
      "learning_rate": 9.094260327696614e-05,
      "loss": 1.1713,
      "step": 20000
    },
    {
      "epoch": 1.28,
      "learning_rate": 8.913112393235937e-05,
      "loss": 1.1049,
      "step": 22000
    },
    {
      "epoch": 1.4,
      "learning_rate": 8.73196445877526e-05,
      "loss": 1.0576,
      "step": 24000
    },
    {
      "epoch": 1.51,
      "learning_rate": 8.550816524314582e-05,
      "loss": 1.0238,
      "step": 26000
    },
    {
      "epoch": 1.63,
      "learning_rate": 8.369668589853905e-05,
      "loss": 0.9882,
      "step": 28000
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.188520655393228e-05,
      "loss": 0.9338,
      "step": 30000
    },
    {
      "epoch": 1.86,
      "learning_rate": 8.00737272093255e-05,
      "loss": 0.895,
      "step": 32000
    },
    {
      "epoch": 1.98,
      "learning_rate": 7.826224786471873e-05,
      "loss": 0.8748,
      "step": 34000
    },
    {
      "epoch": 2.09,
      "learning_rate": 7.645076852011194e-05,
      "loss": 0.8617,
      "step": 36000
    },
    {
      "epoch": 2.21,
      "learning_rate": 7.463928917550518e-05,
      "loss": 0.8464,
      "step": 38000
    },
    {
      "epoch": 2.33,
      "learning_rate": 7.282780983089841e-05,
      "loss": 0.8373,
      "step": 40000
    },
    {
      "epoch": 2.44,
      "learning_rate": 7.101633048629164e-05,
      "loss": 0.8292,
      "step": 42000
    },
    {
      "epoch": 2.56,
      "learning_rate": 6.920485114168485e-05,
      "loss": 0.8215,
      "step": 44000
    },
    {
      "epoch": 2.67,
      "learning_rate": 6.739337179707809e-05,
      "loss": 0.8139,
      "step": 46000
    },
    {
      "epoch": 2.79,
      "learning_rate": 6.558189245247132e-05,
      "loss": 0.8065,
      "step": 48000
    },
    {
      "epoch": 2.91,
      "learning_rate": 6.377041310786455e-05,
      "loss": 0.8003,
      "step": 50000
    },
    {
      "epoch": 3.02,
      "learning_rate": 6.195893376325776e-05,
      "loss": 0.7953,
      "step": 52000
    },
    {
      "epoch": 3.14,
      "learning_rate": 6.0147454418650995e-05,
      "loss": 0.7864,
      "step": 54000
    },
    {
      "epoch": 3.26,
      "learning_rate": 5.833597507404422e-05,
      "loss": 0.7826,
      "step": 56000
    },
    {
      "epoch": 3.37,
      "learning_rate": 5.652449572943744e-05,
      "loss": 0.7781,
      "step": 58000
    },
    {
      "epoch": 3.49,
      "learning_rate": 5.4713016384830676e-05,
      "loss": 0.7744,
      "step": 60000
    },
    {
      "epoch": 3.6,
      "learning_rate": 5.2901537040223903e-05,
      "loss": 0.7678,
      "step": 62000
    },
    {
      "epoch": 3.72,
      "learning_rate": 5.109005769561713e-05,
      "loss": 0.7646,
      "step": 64000
    },
    {
      "epoch": 3.84,
      "learning_rate": 4.927857835101036e-05,
      "loss": 0.7621,
      "step": 66000
    },
    {
      "epoch": 3.95,
      "learning_rate": 4.746709900640358e-05,
      "loss": 0.7588,
      "step": 68000
    },
    {
      "epoch": 4.07,
      "learning_rate": 4.565561966179681e-05,
      "loss": 0.7535,
      "step": 70000
    },
    {
      "epoch": 4.19,
      "learning_rate": 4.384414031719003e-05,
      "loss": 0.7494,
      "step": 72000
    },
    {
      "epoch": 4.3,
      "learning_rate": 4.203266097258326e-05,
      "loss": 0.7466,
      "step": 74000
    },
    {
      "epoch": 4.42,
      "learning_rate": 4.022118162797649e-05,
      "loss": 0.7426,
      "step": 76000
    },
    {
      "epoch": 4.53,
      "learning_rate": 3.8409702283369714e-05,
      "loss": 0.7402,
      "step": 78000
    },
    {
      "epoch": 4.65,
      "learning_rate": 3.659822293876294e-05,
      "loss": 0.7386,
      "step": 80000
    },
    {
      "epoch": 4.77,
      "learning_rate": 3.478674359415617e-05,
      "loss": 0.7348,
      "step": 82000
    },
    {
      "epoch": 4.88,
      "learning_rate": 3.2975264249549395e-05,
      "loss": 0.7331,
      "step": 84000
    },
    {
      "epoch": 5.0,
      "learning_rate": 3.116378490494262e-05,
      "loss": 0.7307,
      "step": 86000
    },
    {
      "epoch": 5.12,
      "learning_rate": 2.9352305560335853e-05,
      "loss": 0.7258,
      "step": 88000
    },
    {
      "epoch": 5.23,
      "learning_rate": 2.7540826215729077e-05,
      "loss": 0.7234,
      "step": 90000
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.57293468711223e-05,
      "loss": 0.7208,
      "step": 92000
    },
    {
      "epoch": 5.46,
      "learning_rate": 2.391786752651553e-05,
      "loss": 0.7179,
      "step": 94000
    },
    {
      "epoch": 5.58,
      "learning_rate": 2.2106388181908758e-05,
      "loss": 0.7188,
      "step": 96000
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.0294908837301985e-05,
      "loss": 0.7158,
      "step": 98000
    },
    {
      "epoch": 5.81,
      "learning_rate": 1.8483429492695212e-05,
      "loss": 0.7146,
      "step": 100000
    },
    {
      "epoch": 5.93,
      "learning_rate": 1.6671950148088436e-05,
      "loss": 0.7125,
      "step": 102000
    },
    {
      "epoch": 6.05,
      "learning_rate": 1.4860470803481663e-05,
      "loss": 0.7092,
      "step": 104000
    },
    {
      "epoch": 6.16,
      "learning_rate": 1.304899145887489e-05,
      "loss": 0.7061,
      "step": 106000
    },
    {
      "epoch": 6.28,
      "learning_rate": 1.1237512114268117e-05,
      "loss": 0.7064,
      "step": 108000
    },
    {
      "epoch": 6.39,
      "learning_rate": 9.426032769661344e-06,
      "loss": 0.7034,
      "step": 110000
    },
    {
      "epoch": 6.51,
      "learning_rate": 7.614553425054572e-06,
      "loss": 0.7027,
      "step": 112000
    },
    {
      "epoch": 6.63,
      "learning_rate": 5.803074080447798e-06,
      "loss": 0.7015,
      "step": 114000
    },
    {
      "epoch": 6.74,
      "learning_rate": 3.991594735841025e-06,
      "loss": 0.7004,
      "step": 116000
    },
    {
      "epoch": 6.86,
      "learning_rate": 2.1801153912342513e-06,
      "loss": 0.7,
      "step": 118000
    },
    {
      "epoch": 6.98,
      "learning_rate": 3.6863604662747836e-07,
      "loss": 0.6989,
      "step": 120000
    },
    {
      "epoch": 7.0,
      "step": 120407,
      "total_flos": 3.3047861122616525e+17,
      "train_loss": 1.4238887095158894,
      "train_runtime": 60363.921,
      "train_samples_per_second": 31.915,
      "train_steps_per_second": 1.995
    }
  ],
  "max_steps": 120407,
  "num_train_epochs": 7,
  "total_flos": 3.3047861122616525e+17,
  "trial_name": null,
  "trial_params": null
}