{
  "best_metric": 0.9384684432613534,
  "best_model_checkpoint": "esm2_t30_150M_UR50D-finetuned-epitope_attempt7_150M_200k_default_accuracy/checkpoint-4576",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 22880,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.22,
      "learning_rate": 9.781468531468531e-05,
      "loss": 0.2387,
      "step": 500
    },
    {
      "epoch": 0.44,
      "learning_rate": 9.562937062937063e-05,
      "loss": 0.2219,
      "step": 1000
    },
    {
      "epoch": 0.66,
      "learning_rate": 9.344405594405596e-05,
      "loss": 0.2248,
      "step": 1500
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.125874125874126e-05,
      "loss": 0.2252,
      "step": 2000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.93836177956272,
      "eval_loss": 0.22232767939567566,
      "eval_runtime": 73.981,
      "eval_samples_per_second": 61.84,
      "eval_steps_per_second": 3.447,
      "step": 2288
    },
    {
      "epoch": 1.09,
      "learning_rate": 8.907342657342657e-05,
      "loss": 0.2174,
      "step": 2500
    },
    {
      "epoch": 1.31,
      "learning_rate": 8.688811188811189e-05,
      "loss": 0.2174,
      "step": 3000
    },
    {
      "epoch": 1.53,
      "learning_rate": 8.47027972027972e-05,
      "loss": 0.2133,
      "step": 3500
    },
    {
      "epoch": 1.75,
      "learning_rate": 8.251748251748252e-05,
      "loss": 0.2153,
      "step": 4000
    },
    {
      "epoch": 1.97,
      "learning_rate": 8.033216783216784e-05,
      "loss": 0.2129,
      "step": 4500
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9384684432613534,
      "eval_loss": 0.21357476711273193,
      "eval_runtime": 72.5716,
      "eval_samples_per_second": 63.041,
      "eval_steps_per_second": 3.514,
      "step": 4576
    },
    {
      "epoch": 2.19,
      "learning_rate": 7.814685314685315e-05,
      "loss": 0.2041,
      "step": 5000
    },
    {
      "epoch": 2.4,
      "learning_rate": 7.596153846153846e-05,
      "loss": 0.1996,
      "step": 5500
    },
    {
      "epoch": 2.62,
      "learning_rate": 7.377622377622378e-05,
      "loss": 0.2019,
      "step": 6000
    },
    {
      "epoch": 2.84,
      "learning_rate": 7.15909090909091e-05,
      "loss": 0.2018,
      "step": 6500
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9380660067945291,
      "eval_loss": 0.21539297699928284,
      "eval_runtime": 72.5454,
      "eval_samples_per_second": 63.064,
      "eval_steps_per_second": 3.515,
      "step": 6864
    },
    {
      "epoch": 3.06,
      "learning_rate": 6.940559440559441e-05,
      "loss": 0.1987,
      "step": 7000
    },
    {
      "epoch": 3.28,
      "learning_rate": 6.722027972027972e-05,
      "loss": 0.1793,
      "step": 7500
    },
    {
      "epoch": 3.5,
      "learning_rate": 6.503496503496504e-05,
      "loss": 0.1827,
      "step": 8000
    },
    {
      "epoch": 3.72,
      "learning_rate": 6.284965034965036e-05,
      "loss": 0.1834,
      "step": 8500
    },
    {
      "epoch": 3.93,
      "learning_rate": 6.066433566433567e-05,
      "loss": 0.1846,
      "step": 9000
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9373786185144478,
      "eval_loss": 0.2264455258846283,
      "eval_runtime": 72.7083,
      "eval_samples_per_second": 62.923,
      "eval_steps_per_second": 3.507,
      "step": 9152
    },
    {
      "epoch": 4.15,
      "learning_rate": 5.8479020979020984e-05,
      "loss": 0.1619,
      "step": 9500
    },
    {
      "epoch": 4.37,
      "learning_rate": 5.629370629370629e-05,
      "loss": 0.1514,
      "step": 10000
    },
    {
      "epoch": 4.59,
      "learning_rate": 5.4108391608391606e-05,
      "loss": 0.1605,
      "step": 10500
    },
    {
      "epoch": 4.81,
      "learning_rate": 5.192307692307693e-05,
      "loss": 0.1556,
      "step": 11000
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.9329569702150639,
      "eval_loss": 0.2557878792285919,
      "eval_runtime": 72.6531,
      "eval_samples_per_second": 62.97,
      "eval_steps_per_second": 3.51,
      "step": 11440
    },
    {
      "epoch": 5.03,
      "learning_rate": 4.973776223776224e-05,
      "loss": 0.1522,
      "step": 11500
    },
    {
      "epoch": 5.24,
      "learning_rate": 4.755244755244756e-05,
      "loss": 0.1183,
      "step": 12000
    },
    {
      "epoch": 5.46,
      "learning_rate": 4.5367132867132865e-05,
      "loss": 0.13,
      "step": 12500
    },
    {
      "epoch": 5.68,
      "learning_rate": 4.318181818181819e-05,
      "loss": 0.1314,
      "step": 13000
    },
    {
      "epoch": 5.9,
      "learning_rate": 4.0996503496503495e-05,
      "loss": 0.1335,
      "step": 13500
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.9281792609700007,
      "eval_loss": 0.29685506224632263,
      "eval_runtime": 72.5857,
      "eval_samples_per_second": 63.029,
      "eval_steps_per_second": 3.513,
      "step": 13728
    },
    {
      "epoch": 6.12,
      "learning_rate": 3.8811188811188816e-05,
      "loss": 0.1169,
      "step": 14000
    },
    {
      "epoch": 6.34,
      "learning_rate": 3.6625874125874124e-05,
      "loss": 0.1083,
      "step": 14500
    },
    {
      "epoch": 6.56,
      "learning_rate": 3.4440559440559445e-05,
      "loss": 0.1126,
      "step": 15000
    },
    {
      "epoch": 6.77,
      "learning_rate": 3.225524475524476e-05,
      "loss": 0.1098,
      "step": 15500
    },
    {
      "epoch": 6.99,
      "learning_rate": 3.0069930069930068e-05,
      "loss": 0.1142,
      "step": 16000
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.9246155598117979,
      "eval_loss": 0.3515949547290802,
      "eval_runtime": 72.4969,
      "eval_samples_per_second": 63.106,
      "eval_steps_per_second": 3.517,
      "step": 16016
    },
    {
      "epoch": 7.21,
      "learning_rate": 2.7884615384615386e-05,
      "loss": 0.0948,
      "step": 16500
    },
    {
      "epoch": 7.43,
      "learning_rate": 2.5699300699300697e-05,
      "loss": 0.0956,
      "step": 17000
    },
    {
      "epoch": 7.65,
      "learning_rate": 2.3513986013986015e-05,
      "loss": 0.0955,
      "step": 17500
    },
    {
      "epoch": 7.87,
      "learning_rate": 2.132867132867133e-05,
      "loss": 0.0988,
      "step": 18000
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.925055096695538,
      "eval_loss": 0.3799491226673126,
      "eval_runtime": 72.5847,
      "eval_samples_per_second": 63.03,
      "eval_steps_per_second": 3.513,
      "step": 18304
    },
    {
      "epoch": 8.09,
      "learning_rate": 1.9143356643356645e-05,
      "loss": 0.092,
      "step": 18500
    },
    {
      "epoch": 8.3,
      "learning_rate": 1.695804195804196e-05,
      "loss": 0.0822,
      "step": 19000
    },
    {
      "epoch": 8.52,
      "learning_rate": 1.4772727272727274e-05,
      "loss": 0.0854,
      "step": 19500
    },
    {
      "epoch": 8.74,
      "learning_rate": 1.2587412587412589e-05,
      "loss": 0.0885,
      "step": 20000
    },
    {
      "epoch": 8.96,
      "learning_rate": 1.0402097902097904e-05,
      "loss": 0.0842,
      "step": 20500
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.9202073088851892,
      "eval_loss": 0.4125804305076599,
      "eval_runtime": 72.5474,
      "eval_samples_per_second": 63.062,
      "eval_steps_per_second": 3.515,
      "step": 20592
    },
    {
      "epoch": 9.18,
      "learning_rate": 8.216783216783217e-06,
      "loss": 0.0761,
      "step": 21000
    },
    {
      "epoch": 9.4,
      "learning_rate": 6.031468531468531e-06,
      "loss": 0.074,
      "step": 21500
    },
    {
      "epoch": 9.62,
      "learning_rate": 3.846153846153847e-06,
      "loss": 0.0758,
      "step": 22000
    },
    {
      "epoch": 9.83,
      "learning_rate": 1.6608391608391608e-06,
      "loss": 0.0747,
      "step": 22500
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.9171542537431486,
      "eval_loss": 0.4537607729434967,
      "eval_runtime": 72.8015,
      "eval_samples_per_second": 62.842,
      "eval_steps_per_second": 3.503,
      "step": 22880
    }
  ],
  "logging_steps": 500,
  "max_steps": 22880,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 3.257616148755644e+17,
  "trial_name": null,
  "trial_params": null
}