{
  "best_metric": 1.5115073919296265,
  "best_model_checkpoint": "tam_test_out_drug_data_large_test/checkpoint-40325",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 40325,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 606205.1875,
      "learning_rate": 5.925604463732176e-05,
      "loss": 2.1094,
      "step": 500
    },
    {
      "epoch": 0.02,
      "grad_norm": 535000.3125,
      "learning_rate": 5.851208927464352e-05,
      "loss": 2.0692,
      "step": 1000
    },
    {
      "epoch": 0.04,
      "grad_norm": 756629.3125,
      "learning_rate": 5.7768133911965284e-05,
      "loss": 2.0081,
      "step": 1500
    },
    {
      "epoch": 0.05,
      "grad_norm": 652686.0,
      "learning_rate": 5.7024178549287045e-05,
      "loss": 1.9094,
      "step": 2000
    },
    {
      "epoch": 0.06,
      "grad_norm": 713175.3125,
      "learning_rate": 5.6280223186608805e-05,
      "loss": 1.8251,
      "step": 2500
    },
    {
      "epoch": 0.07,
      "grad_norm": 514924.375,
      "learning_rate": 5.5536267823930566e-05,
      "loss": 1.8088,
      "step": 3000
    },
    {
      "epoch": 0.09,
      "grad_norm": 598626.25,
      "learning_rate": 5.479231246125233e-05,
      "loss": 1.7972,
      "step": 3500
    },
    {
      "epoch": 0.1,
      "grad_norm": 455352.53125,
      "learning_rate": 5.404835709857409e-05,
      "loss": 1.7607,
      "step": 4000
    },
    {
      "epoch": 0.11,
      "grad_norm": 498720.8125,
      "learning_rate": 5.330440173589585e-05,
      "loss": 1.7936,
      "step": 4500
    },
    {
      "epoch": 0.12,
      "grad_norm": 416274.09375,
      "learning_rate": 5.256044637321761e-05,
      "loss": 1.7587,
      "step": 5000
    },
    {
      "epoch": 0.14,
      "grad_norm": 426318.875,
      "learning_rate": 5.181649101053937e-05,
      "loss": 1.778,
      "step": 5500
    },
    {
      "epoch": 0.15,
      "grad_norm": 245576.5,
      "learning_rate": 5.107253564786113e-05,
      "loss": 1.7174,
      "step": 6000
    },
    {
      "epoch": 0.16,
      "grad_norm": 343734.96875,
      "learning_rate": 5.032858028518289e-05,
      "loss": 1.7171,
      "step": 6500
    },
    {
      "epoch": 0.17,
      "grad_norm": 543570.625,
      "learning_rate": 4.958462492250465e-05,
      "loss": 1.7212,
      "step": 7000
    },
    {
      "epoch": 0.19,
      "grad_norm": 364161.46875,
      "learning_rate": 4.884066955982641e-05,
      "loss": 1.6634,
      "step": 7500
    },
    {
      "epoch": 0.2,
      "grad_norm": 363151.9375,
      "learning_rate": 4.8096714197148174e-05,
      "loss": 1.6824,
      "step": 8000
    },
    {
      "epoch": 0.21,
      "grad_norm": 584323.75,
      "learning_rate": 4.7352758834469934e-05,
      "loss": 1.683,
      "step": 8500
    },
    {
      "epoch": 0.22,
      "grad_norm": 517269.78125,
      "learning_rate": 4.6608803471791695e-05,
      "loss": 1.6421,
      "step": 9000
    },
    {
      "epoch": 0.24,
      "grad_norm": 589548.0,
      "learning_rate": 4.5864848109113456e-05,
      "loss": 1.629,
      "step": 9500
    },
    {
      "epoch": 0.25,
      "grad_norm": 516179.65625,
      "learning_rate": 4.5120892746435217e-05,
      "loss": 1.6111,
      "step": 10000
    },
    {
      "epoch": 0.26,
      "grad_norm": 719886.4375,
      "learning_rate": 4.437693738375697e-05,
      "loss": 1.6725,
      "step": 10500
    },
    {
      "epoch": 0.27,
      "grad_norm": 374763.96875,
      "learning_rate": 4.363298202107873e-05,
      "loss": 1.6587,
      "step": 11000
    },
    {
      "epoch": 0.29,
      "grad_norm": 539950.75,
      "learning_rate": 4.288902665840049e-05,
      "loss": 1.5891,
      "step": 11500
    },
    {
      "epoch": 0.3,
      "grad_norm": 841417.125,
      "learning_rate": 4.214507129572225e-05,
      "loss": 1.6487,
      "step": 12000
    },
    {
      "epoch": 0.31,
      "grad_norm": 413180.6875,
      "learning_rate": 4.1401115933044014e-05,
      "loss": 1.6599,
      "step": 12500
    },
    {
      "epoch": 0.32,
      "grad_norm": 724763.75,
      "learning_rate": 4.0657160570365774e-05,
      "loss": 1.6468,
      "step": 13000
    },
    {
      "epoch": 0.33,
      "grad_norm": 630071.0,
      "learning_rate": 3.9913205207687535e-05,
      "loss": 1.6445,
      "step": 13500
    },
    {
      "epoch": 0.35,
      "grad_norm": 413867.3125,
      "learning_rate": 3.9169249845009296e-05,
      "loss": 1.6255,
      "step": 14000
    },
    {
      "epoch": 0.36,
      "grad_norm": 431392.0625,
      "learning_rate": 3.8425294482331057e-05,
      "loss": 1.607,
      "step": 14500
    },
    {
      "epoch": 0.37,
      "grad_norm": 465831.90625,
      "learning_rate": 3.768133911965282e-05,
      "loss": 1.6187,
      "step": 15000
    },
    {
      "epoch": 0.38,
      "grad_norm": 578294.4375,
      "learning_rate": 3.693738375697458e-05,
      "loss": 1.6044,
      "step": 15500
    },
    {
      "epoch": 0.4,
      "grad_norm": 463151.03125,
      "learning_rate": 3.619342839429634e-05,
      "loss": 1.6294,
      "step": 16000
    },
    {
      "epoch": 0.41,
      "grad_norm": 394089.59375,
      "learning_rate": 3.54494730316181e-05,
      "loss": 1.6219,
      "step": 16500
    },
    {
      "epoch": 0.42,
      "grad_norm": 808582.75,
      "learning_rate": 3.470551766893986e-05,
      "loss": 1.5825,
      "step": 17000
    },
    {
      "epoch": 0.43,
      "grad_norm": 166829.9375,
      "learning_rate": 3.396156230626162e-05,
      "loss": 1.5696,
      "step": 17500
    },
    {
      "epoch": 0.45,
      "grad_norm": 755760.125,
      "learning_rate": 3.321760694358338e-05,
      "loss": 1.6148,
      "step": 18000
    },
    {
      "epoch": 0.46,
      "grad_norm": 894633.5625,
      "learning_rate": 3.247365158090515e-05,
      "loss": 1.6118,
      "step": 18500
    },
    {
      "epoch": 0.47,
      "grad_norm": 445554.59375,
      "learning_rate": 3.172969621822691e-05,
      "loss": 1.6123,
      "step": 19000
    },
    {
      "epoch": 0.48,
      "grad_norm": 458144.21875,
      "learning_rate": 3.098574085554867e-05,
      "loss": 1.628,
      "step": 19500
    },
    {
      "epoch": 0.5,
      "grad_norm": 382592.625,
      "learning_rate": 3.0241785492870428e-05,
      "loss": 1.6075,
      "step": 20000
    },
    {
      "epoch": 0.51,
      "grad_norm": 343228.125,
      "learning_rate": 2.949783013019219e-05,
      "loss": 1.6086,
      "step": 20500
    },
    {
      "epoch": 0.52,
      "grad_norm": 432318.65625,
      "learning_rate": 2.875387476751395e-05,
      "loss": 1.5723,
      "step": 21000
    },
    {
      "epoch": 0.53,
      "grad_norm": 264779.78125,
      "learning_rate": 2.800991940483571e-05,
      "loss": 1.6276,
      "step": 21500
    },
    {
      "epoch": 0.55,
      "grad_norm": 583042.0625,
      "learning_rate": 2.726596404215747e-05,
      "loss": 1.583,
      "step": 22000
    },
    {
      "epoch": 0.56,
      "grad_norm": 402780.90625,
      "learning_rate": 2.6522008679479232e-05,
      "loss": 1.5676,
      "step": 22500
    },
    {
      "epoch": 0.57,
      "grad_norm": 528016.6875,
      "learning_rate": 2.5778053316800993e-05,
      "loss": 1.5991,
      "step": 23000
    },
    {
      "epoch": 0.58,
      "grad_norm": 442681.25,
      "learning_rate": 2.5034097954122753e-05,
      "loss": 1.5809,
      "step": 23500
    },
    {
      "epoch": 0.6,
      "grad_norm": 820616.5625,
      "learning_rate": 2.4290142591444514e-05,
      "loss": 1.5583,
      "step": 24000
    },
    {
      "epoch": 0.61,
      "grad_norm": 527036.1875,
      "learning_rate": 2.3546187228766275e-05,
      "loss": 1.5536,
      "step": 24500
    },
    {
      "epoch": 0.62,
      "grad_norm": 425855.125,
      "learning_rate": 2.2802231866088036e-05,
      "loss": 1.604,
      "step": 25000
    },
    {
      "epoch": 0.63,
      "grad_norm": 477485.53125,
      "learning_rate": 2.2058276503409793e-05,
      "loss": 1.5564,
      "step": 25500
    },
    {
      "epoch": 0.64,
      "grad_norm": 684294.125,
      "learning_rate": 2.1314321140731554e-05,
      "loss": 1.5505,
      "step": 26000
    },
    {
      "epoch": 0.66,
      "grad_norm": 687388.875,
      "learning_rate": 2.0570365778053315e-05,
      "loss": 1.5636,
      "step": 26500
    },
    {
      "epoch": 0.67,
      "grad_norm": 827780.125,
      "learning_rate": 1.9826410415375075e-05,
      "loss": 1.5526,
      "step": 27000
    },
    {
      "epoch": 0.68,
      "grad_norm": 488844.8125,
      "learning_rate": 1.908245505269684e-05,
      "loss": 1.5192,
      "step": 27500
    },
    {
      "epoch": 0.69,
      "grad_norm": 463870.6875,
      "learning_rate": 1.83384996900186e-05,
      "loss": 1.5441,
      "step": 28000
    },
    {
      "epoch": 0.71,
      "grad_norm": 906024.625,
      "learning_rate": 1.759454432734036e-05,
      "loss": 1.5328,
      "step": 28500
    },
    {
      "epoch": 0.72,
      "grad_norm": 539641.375,
      "learning_rate": 1.6850588964662122e-05,
      "loss": 1.5892,
      "step": 29000
    },
    {
      "epoch": 0.73,
      "grad_norm": 773464.4375,
      "learning_rate": 1.6106633601983882e-05,
      "loss": 1.5384,
      "step": 29500
    },
    {
      "epoch": 0.74,
      "grad_norm": 939030.25,
      "learning_rate": 1.5362678239305643e-05,
      "loss": 1.588,
      "step": 30000
    },
    {
      "epoch": 0.76,
      "grad_norm": 715276.5,
      "learning_rate": 1.4618722876627402e-05,
      "loss": 1.5575,
      "step": 30500
    },
    {
      "epoch": 0.77,
      "grad_norm": 478972.75,
      "learning_rate": 1.3874767513949163e-05,
      "loss": 1.5229,
      "step": 31000
    },
    {
      "epoch": 0.78,
      "grad_norm": 666075.0,
      "learning_rate": 1.3130812151270924e-05,
      "loss": 1.5491,
      "step": 31500
    },
    {
      "epoch": 0.79,
      "grad_norm": 706126.125,
      "learning_rate": 1.2386856788592685e-05,
      "loss": 1.5522,
      "step": 32000
    },
    {
      "epoch": 0.81,
      "grad_norm": 380624.65625,
      "learning_rate": 1.1642901425914447e-05,
      "loss": 1.5408,
      "step": 32500
    },
    {
      "epoch": 0.82,
      "grad_norm": 436867.90625,
      "learning_rate": 1.0898946063236206e-05,
      "loss": 1.5076,
      "step": 33000
    },
    {
      "epoch": 0.83,
      "grad_norm": 495405.875,
      "learning_rate": 1.0154990700557967e-05,
      "loss": 1.5355,
      "step": 33500
    },
    {
      "epoch": 0.84,
      "grad_norm": 298892.375,
      "learning_rate": 9.411035337879728e-06,
      "loss": 1.4743,
      "step": 34000
    },
    {
      "epoch": 0.86,
      "grad_norm": 455935.5,
      "learning_rate": 8.667079975201488e-06,
      "loss": 1.5539,
      "step": 34500
    },
    {
      "epoch": 0.87,
      "grad_norm": 513792.59375,
      "learning_rate": 7.923124612523249e-06,
      "loss": 1.5538,
      "step": 35000
    },
    {
      "epoch": 0.88,
      "grad_norm": 156134.75,
      "learning_rate": 7.17916924984501e-06,
      "loss": 1.5211,
      "step": 35500
    },
    {
      "epoch": 0.89,
      "grad_norm": 522846.1875,
      "learning_rate": 6.4352138871667705e-06,
      "loss": 1.5259,
      "step": 36000
    },
    {
      "epoch": 0.91,
      "grad_norm": 373932.15625,
      "learning_rate": 5.691258524488531e-06,
      "loss": 1.4907,
      "step": 36500
    },
    {
      "epoch": 0.92,
      "grad_norm": 747137.0625,
      "learning_rate": 4.947303161810291e-06,
      "loss": 1.5323,
      "step": 37000
    },
    {
      "epoch": 0.93,
      "grad_norm": 354971.9375,
      "learning_rate": 4.203347799132052e-06,
      "loss": 1.5479,
      "step": 37500
    },
    {
      "epoch": 0.94,
      "grad_norm": 623187.875,
      "learning_rate": 3.459392436453813e-06,
      "loss": 1.5263,
      "step": 38000
    },
    {
      "epoch": 0.95,
      "grad_norm": 348824.09375,
      "learning_rate": 2.7154370737755734e-06,
      "loss": 1.4994,
      "step": 38500
    },
    {
      "epoch": 0.97,
      "grad_norm": 538969.25,
      "learning_rate": 1.971481711097334e-06,
      "loss": 1.5384,
      "step": 39000
    },
    {
      "epoch": 0.98,
      "grad_norm": 965487.0625,
      "learning_rate": 1.2275263484190947e-06,
      "loss": 1.5025,
      "step": 39500
    },
    {
      "epoch": 0.99,
      "grad_norm": 504319.8125,
      "learning_rate": 4.835709857408556e-07,
      "loss": 1.5344,
      "step": 40000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.4533618525062773,
      "eval_loss": 1.5115073919296265,
      "eval_runtime": 520.995,
      "eval_samples_per_second": 20.639,
      "eval_steps_per_second": 5.161,
      "step": 40325
    }
  ],
  "logging_steps": 500,
  "max_steps": 40325,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "total_flos": 2.532444326342784e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}