{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03184713375796178,
      "grad_norm": 40.78604947627245,
      "learning_rate": 2e-05,
      "loss": 3.5775,
      "step": 5
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 8.368950744101049,
      "learning_rate": 2e-05,
      "loss": 1.1247,
      "step": 10
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 9.457819678472001,
      "learning_rate": 2e-05,
      "loss": 0.7369,
      "step": 15
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 5.641687715871123,
      "learning_rate": 2e-05,
      "loss": 0.5158,
      "step": 20
    },
    {
      "epoch": 0.1592356687898089,
      "grad_norm": 5.418478301114858,
      "learning_rate": 2e-05,
      "loss": 0.4339,
      "step": 25
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 5.045194163255571,
      "learning_rate": 2e-05,
      "loss": 0.463,
      "step": 30
    },
    {
      "epoch": 0.2229299363057325,
      "grad_norm": 4.781645335845452,
      "learning_rate": 2e-05,
      "loss": 0.3769,
      "step": 35
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 5.347347835798862,
      "learning_rate": 2e-05,
      "loss": 0.3619,
      "step": 40
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 4.246397453328318,
      "learning_rate": 2e-05,
      "loss": 0.3027,
      "step": 45
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 4.33165925517153,
      "learning_rate": 2e-05,
      "loss": 0.2415,
      "step": 50
    },
    {
      "epoch": 0.3503184713375796,
      "grad_norm": 3.844921413622979,
      "learning_rate": 2e-05,
      "loss": 0.2504,
      "step": 55
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 6.45954282043095,
      "learning_rate": 2e-05,
      "loss": 0.2317,
      "step": 60
    },
    {
      "epoch": 0.4140127388535032,
      "grad_norm": 4.385098986777501,
      "learning_rate": 2e-05,
      "loss": 0.2197,
      "step": 65
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 4.208753643886586,
      "learning_rate": 2e-05,
      "loss": 0.2132,
      "step": 70
    },
    {
      "epoch": 0.47770700636942676,
      "grad_norm": 2.771863737776465,
      "learning_rate": 2e-05,
      "loss": 0.2171,
      "step": 75
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 2.34352087689591,
      "learning_rate": 2e-05,
      "loss": 0.2368,
      "step": 80
    },
    {
      "epoch": 0.5414012738853503,
      "grad_norm": 2.837909679350126,
      "learning_rate": 2e-05,
      "loss": 0.2246,
      "step": 85
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 4.432629271419918,
      "learning_rate": 2e-05,
      "loss": 0.2425,
      "step": 90
    },
    {
      "epoch": 0.6050955414012739,
      "grad_norm": 2.0628378042750244,
      "learning_rate": 2e-05,
      "loss": 0.2084,
      "step": 95
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 3.2999762267826993,
      "learning_rate": 2e-05,
      "loss": 0.1822,
      "step": 100
    },
    {
      "epoch": 0.6687898089171974,
      "grad_norm": 1.7122002214796863,
      "learning_rate": 2e-05,
      "loss": 0.1756,
      "step": 105
    },
    {
      "epoch": 0.7006369426751592,
      "grad_norm": 3.2760105813071396,
      "learning_rate": 2e-05,
      "loss": 0.2213,
      "step": 110
    },
    {
      "epoch": 0.732484076433121,
      "grad_norm": 3.283904309843202,
      "learning_rate": 2e-05,
      "loss": 0.2022,
      "step": 115
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 2.600480873861268,
      "learning_rate": 2e-05,
      "loss": 0.2137,
      "step": 120
    },
    {
      "epoch": 0.7961783439490446,
      "grad_norm": 2.81626515746397,
      "learning_rate": 2e-05,
      "loss": 0.1902,
      "step": 125
    },
    {
      "epoch": 0.8280254777070064,
      "grad_norm": 3.7954679627760846,
      "learning_rate": 2e-05,
      "loss": 0.1856,
      "step": 130
    },
    {
      "epoch": 0.8598726114649682,
      "grad_norm": 3.872799664454645,
      "learning_rate": 2e-05,
      "loss": 0.1763,
      "step": 135
    },
    {
      "epoch": 0.89171974522293,
      "grad_norm": 3.284248984016964,
      "learning_rate": 2e-05,
      "loss": 0.1994,
      "step": 140
    },
    {
      "epoch": 0.9235668789808917,
      "grad_norm": 3.307812812852979,
      "learning_rate": 2e-05,
      "loss": 0.1542,
      "step": 145
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 3.4547396546144338,
      "learning_rate": 2e-05,
      "loss": 0.1916,
      "step": 150
    },
    {
      "epoch": 0.9872611464968153,
      "grad_norm": 2.4075546087603814,
      "learning_rate": 2e-05,
      "loss": 0.1619,
      "step": 155
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 1.647706175151772,
      "learning_rate": 2e-05,
      "loss": 0.1193,
      "step": 160
    },
    {
      "epoch": 1.0509554140127388,
      "grad_norm": 2.023644719658301,
      "learning_rate": 2e-05,
      "loss": 0.0982,
      "step": 165
    },
    {
      "epoch": 1.0828025477707006,
      "grad_norm": 2.31298016202094,
      "learning_rate": 2e-05,
      "loss": 0.1076,
      "step": 170
    },
    {
      "epoch": 1.1146496815286624,
      "grad_norm": 3.1177330537998893,
      "learning_rate": 2e-05,
      "loss": 0.1043,
      "step": 175
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 2.0318620194736385,
      "learning_rate": 2e-05,
      "loss": 0.1098,
      "step": 180
    },
    {
      "epoch": 1.178343949044586,
      "grad_norm": 1.8723189582500832,
      "learning_rate": 2e-05,
      "loss": 0.1077,
      "step": 185
    },
    {
      "epoch": 1.2101910828025477,
      "grad_norm": 1.8173816222586743,
      "learning_rate": 2e-05,
      "loss": 0.0878,
      "step": 190
    },
    {
      "epoch": 1.2420382165605095,
      "grad_norm": 1.9562933607571518,
      "learning_rate": 2e-05,
      "loss": 0.1072,
      "step": 195
    },
    {
      "epoch": 1.2738853503184713,
      "grad_norm": 1.727322800396086,
      "learning_rate": 2e-05,
      "loss": 0.1092,
      "step": 200
    },
    {
      "epoch": 1.305732484076433,
      "grad_norm": 1.8614693734662744,
      "learning_rate": 2e-05,
      "loss": 0.1104,
      "step": 205
    },
    {
      "epoch": 1.3375796178343948,
      "grad_norm": 1.1917814271310345,
      "learning_rate": 2e-05,
      "loss": 0.0801,
      "step": 210
    },
    {
      "epoch": 1.3694267515923566,
      "grad_norm": 2.3490229391297817,
      "learning_rate": 2e-05,
      "loss": 0.0865,
      "step": 215
    },
    {
      "epoch": 1.4012738853503186,
      "grad_norm": 2.353167768601884,
      "learning_rate": 2e-05,
      "loss": 0.1017,
      "step": 220
    },
    {
      "epoch": 1.4331210191082802,
      "grad_norm": 1.9334989797015238,
      "learning_rate": 2e-05,
      "loss": 0.1077,
      "step": 225
    },
    {
      "epoch": 1.4649681528662422,
      "grad_norm": 3.117979494089919,
      "learning_rate": 2e-05,
      "loss": 0.104,
      "step": 230
    },
    {
      "epoch": 1.4968152866242037,
      "grad_norm": 1.5255884501650592,
      "learning_rate": 2e-05,
      "loss": 0.1059,
      "step": 235
    },
    {
      "epoch": 1.5286624203821657,
      "grad_norm": 3.232967624083442,
      "learning_rate": 2e-05,
      "loss": 0.1287,
      "step": 240
    },
    {
      "epoch": 1.5605095541401273,
      "grad_norm": 2.159172752729262,
      "learning_rate": 2e-05,
      "loss": 0.1261,
      "step": 245
    },
    {
      "epoch": 1.5923566878980893,
      "grad_norm": 2.265632275382293,
      "learning_rate": 2e-05,
      "loss": 0.1144,
      "step": 250
    },
    {
      "epoch": 1.6242038216560508,
      "grad_norm": 1.4487469560262782,
      "learning_rate": 2e-05,
      "loss": 0.0979,
      "step": 255
    },
    {
      "epoch": 1.6560509554140128,
      "grad_norm": 2.119134977703782,
      "learning_rate": 2e-05,
      "loss": 0.0959,
      "step": 260
    },
    {
      "epoch": 1.6878980891719744,
      "grad_norm": 2.012372872214993,
      "learning_rate": 2e-05,
      "loss": 0.0935,
      "step": 265
    },
    {
      "epoch": 1.7197452229299364,
      "grad_norm": 1.903501479994221,
      "learning_rate": 2e-05,
      "loss": 0.1052,
      "step": 270
    },
    {
      "epoch": 1.7515923566878981,
      "grad_norm": 1.3209208004358894,
      "learning_rate": 2e-05,
      "loss": 0.1049,
      "step": 275
    },
    {
      "epoch": 1.78343949044586,
      "grad_norm": 1.8426392287716327,
      "learning_rate": 2e-05,
      "loss": 0.1076,
      "step": 280
    },
    {
      "epoch": 1.8152866242038217,
      "grad_norm": 1.7290464386835456,
      "learning_rate": 2e-05,
      "loss": 0.114,
      "step": 285
    },
    {
      "epoch": 1.8471337579617835,
      "grad_norm": 1.8841891982633818,
      "learning_rate": 2e-05,
      "loss": 0.1089,
      "step": 290
    },
    {
      "epoch": 1.8789808917197452,
      "grad_norm": 1.7984664142767979,
      "learning_rate": 2e-05,
      "loss": 0.1158,
      "step": 295
    },
    {
      "epoch": 1.910828025477707,
      "grad_norm": 1.7696270341848916,
      "learning_rate": 2e-05,
      "loss": 0.1011,
      "step": 300
    },
    {
      "epoch": 1.9426751592356688,
      "grad_norm": 1.378308282552831,
      "learning_rate": 2e-05,
      "loss": 0.0949,
      "step": 305
    },
    {
      "epoch": 1.9745222929936306,
      "grad_norm": 1.4334428707330111,
      "learning_rate": 2e-05,
      "loss": 0.0843,
      "step": 310
    }
  ],
  "logging_steps": 5,
  "max_steps": 314,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 157,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2702507507712.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}