{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.475685234305924,
  "eval_steps": 100,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.08841732979664015,
      "eval_accuracy": 0.1989687629116883,
      "eval_loss": 4.78662633895874,
      "eval_runtime": 26.1083,
      "eval_samples_per_second": 16.7,
      "eval_steps_per_second": 0.536,
      "step": 100
    },
    {
      "epoch": 0.1768346595932803,
      "eval_accuracy": 0.330860012697261,
      "eval_loss": 4.04963493347168,
      "eval_runtime": 26.2098,
      "eval_samples_per_second": 16.635,
      "eval_steps_per_second": 0.534,
      "step": 200
    },
    {
      "epoch": 0.26525198938992045,
      "eval_accuracy": 0.37791993303243177,
      "eval_loss": 3.652527332305908,
      "eval_runtime": 26.1862,
      "eval_samples_per_second": 16.65,
      "eval_steps_per_second": 0.535,
      "step": 300
    },
    {
      "epoch": 0.3536693191865606,
      "eval_accuracy": 0.42577323076228935,
      "eval_loss": 3.2409958839416504,
      "eval_runtime": 26.1526,
      "eval_samples_per_second": 16.671,
      "eval_steps_per_second": 0.535,
      "step": 400
    },
    {
      "epoch": 0.4420866489832007,
      "grad_norm": 195.0,
      "learning_rate": 4.263188918361332e-05,
      "loss": 3.9116,
      "step": 500
    },
    {
      "epoch": 0.4420866489832007,
      "eval_accuracy": 0.39117474036699323,
      "eval_loss": 3.6305172443389893,
      "eval_runtime": 26.2335,
      "eval_samples_per_second": 16.62,
      "eval_steps_per_second": 0.534,
      "step": 500
    },
    {
      "epoch": 0.5305039787798409,
      "eval_accuracy": 0.4406458382696409,
      "eval_loss": 3.177022695541382,
      "eval_runtime": 26.2979,
      "eval_samples_per_second": 16.579,
      "eval_steps_per_second": 0.532,
      "step": 600
    },
    {
      "epoch": 0.618921308576481,
      "eval_accuracy": 0.5199022247813888,
      "eval_loss": 2.4477944374084473,
      "eval_runtime": 26.2814,
      "eval_samples_per_second": 16.59,
      "eval_steps_per_second": 0.533,
      "step": 700
    },
    {
      "epoch": 0.7073386383731212,
      "eval_accuracy": 0.5508474097348983,
      "eval_loss": 2.238290548324585,
      "eval_runtime": 26.1919,
      "eval_samples_per_second": 16.646,
      "eval_steps_per_second": 0.535,
      "step": 800
    },
    {
      "epoch": 0.7957559681697612,
      "eval_accuracy": 0.5634974176979812,
      "eval_loss": 2.1547296047210693,
      "eval_runtime": 26.0445,
      "eval_samples_per_second": 16.741,
      "eval_steps_per_second": 0.538,
      "step": 900
    },
    {
      "epoch": 0.8841732979664014,
      "grad_norm": 1.6484375,
      "learning_rate": 3.526377836722665e-05,
      "loss": 2.4568,
      "step": 1000
    },
    {
      "epoch": 0.8841732979664014,
      "eval_accuracy": 0.5759096753818043,
      "eval_loss": 2.086787223815918,
      "eval_runtime": 26.1107,
      "eval_samples_per_second": 16.698,
      "eval_steps_per_second": 0.536,
      "step": 1000
    },
    {
      "epoch": 0.9725906277630415,
      "eval_accuracy": 0.5819702172527925,
      "eval_loss": 2.0399203300476074,
      "eval_runtime": 26.206,
      "eval_samples_per_second": 16.637,
      "eval_steps_per_second": 0.534,
      "step": 1100
    },
    {
      "epoch": 1.0610079575596818,
      "eval_accuracy": 0.5872521470246358,
      "eval_loss": 2.0102362632751465,
      "eval_runtime": 26.1934,
      "eval_samples_per_second": 16.645,
      "eval_steps_per_second": 0.534,
      "step": 1200
    },
    {
      "epoch": 1.1494252873563218,
      "eval_accuracy": 0.5896846032104907,
      "eval_loss": 1.9805158376693726,
      "eval_runtime": 26.091,
      "eval_samples_per_second": 16.711,
      "eval_steps_per_second": 0.537,
      "step": 1300
    },
    {
      "epoch": 1.237842617152962,
      "eval_accuracy": 0.5955343118975784,
      "eval_loss": 1.9590190649032593,
      "eval_runtime": 26.2439,
      "eval_samples_per_second": 16.613,
      "eval_steps_per_second": 0.533,
      "step": 1400
    },
    {
      "epoch": 1.3262599469496021,
      "grad_norm": 1.703125,
      "learning_rate": 2.7895667550839967e-05,
      "loss": 1.9305,
      "step": 1500
    },
    {
      "epoch": 1.3262599469496021,
      "eval_accuracy": 0.598231793709266,
      "eval_loss": 1.9380624294281006,
      "eval_runtime": 26.2429,
      "eval_samples_per_second": 16.614,
      "eval_steps_per_second": 0.533,
      "step": 1500
    },
    {
      "epoch": 1.4146772767462423,
      "eval_accuracy": 0.5994681725660957,
      "eval_loss": 1.92489492893219,
      "eval_runtime": 26.2375,
      "eval_samples_per_second": 16.617,
      "eval_steps_per_second": 0.534,
      "step": 1600
    },
    {
      "epoch": 1.5030946065428825,
      "eval_accuracy": 0.6016819917312428,
      "eval_loss": 1.922256588935852,
      "eval_runtime": 26.2419,
      "eval_samples_per_second": 16.615,
      "eval_steps_per_second": 0.533,
      "step": 1700
    },
    {
      "epoch": 1.5915119363395225,
      "eval_accuracy": 0.6037235246198407,
      "eval_loss": 1.9091354608535767,
      "eval_runtime": 26.0733,
      "eval_samples_per_second": 16.722,
      "eval_steps_per_second": 0.537,
      "step": 1800
    },
    {
      "epoch": 1.6799292661361627,
      "eval_accuracy": 0.6042424379334493,
      "eval_loss": 1.9038457870483398,
      "eval_runtime": 26.0298,
      "eval_samples_per_second": 16.75,
      "eval_steps_per_second": 0.538,
      "step": 1900
    },
    {
      "epoch": 1.7683465959328029,
      "grad_norm": 1.703125,
      "learning_rate": 2.0527556734453286e-05,
      "loss": 1.8511,
      "step": 2000
    },
    {
      "epoch": 1.7683465959328029,
      "eval_accuracy": 0.6045487728273357,
      "eval_loss": 1.898189663887024,
      "eval_runtime": 26.1744,
      "eval_samples_per_second": 16.658,
      "eval_steps_per_second": 0.535,
      "step": 2000
    },
    {
      "epoch": 1.8567639257294428,
      "eval_accuracy": 0.60601247039271,
      "eval_loss": 1.8923863172531128,
      "eval_runtime": 26.2245,
      "eval_samples_per_second": 16.626,
      "eval_steps_per_second": 0.534,
      "step": 2100
    },
    {
      "epoch": 1.9451812555260832,
      "eval_accuracy": 0.6072253419074359,
      "eval_loss": 1.8843563795089722,
      "eval_runtime": 26.2182,
      "eval_samples_per_second": 16.63,
      "eval_steps_per_second": 0.534,
      "step": 2200
    },
    {
      "epoch": 2.033598585322723,
      "eval_accuracy": 0.6086919582677255,
      "eval_loss": 1.8872811794281006,
      "eval_runtime": 26.2095,
      "eval_samples_per_second": 16.635,
      "eval_steps_per_second": 0.534,
      "step": 2300
    },
    {
      "epoch": 2.1220159151193636,
      "eval_accuracy": 0.606785049583925,
      "eval_loss": 1.8889071941375732,
      "eval_runtime": 26.1074,
      "eval_samples_per_second": 16.7,
      "eval_steps_per_second": 0.536,
      "step": 2400
    },
    {
      "epoch": 2.2104332449160036,
      "grad_norm": 1.9609375,
      "learning_rate": 1.315944591806661e-05,
      "loss": 1.8197,
      "step": 2500
    },
    {
      "epoch": 2.2104332449160036,
      "eval_accuracy": 0.6080460875913235,
      "eval_loss": 1.8847737312316895,
      "eval_runtime": 26.1528,
      "eval_samples_per_second": 16.671,
      "eval_steps_per_second": 0.535,
      "step": 2500
    },
    {
      "epoch": 2.2988505747126435,
      "eval_accuracy": 0.6090524026863879,
      "eval_loss": 1.8735781908035278,
      "eval_runtime": 26.1209,
      "eval_samples_per_second": 16.692,
      "eval_steps_per_second": 0.536,
      "step": 2600
    },
    {
      "epoch": 2.387267904509284,
      "eval_accuracy": 0.6072484329163864,
      "eval_loss": 1.8857922554016113,
      "eval_runtime": 26.0234,
      "eval_samples_per_second": 16.754,
      "eval_steps_per_second": 0.538,
      "step": 2700
    },
    {
      "epoch": 2.475685234305924,
      "eval_accuracy": 0.6088222983902897,
      "eval_loss": 1.8814462423324585,
      "eval_runtime": 26.1989,
      "eval_samples_per_second": 16.642,
      "eval_steps_per_second": 0.534,
      "step": 2800
    }
  ],
  "logging_steps": 500,
  "max_steps": 3393,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 200,
  "total_flos": 1.4255722173195878e+18,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}