{
  "best_metric": 0.7769453778278917,
  "best_model_checkpoint": "2-parallel-pt-nl-pl-zxh4546/allnli_wikispan_unsup_ensemble_last-64-128-3e-5-9400/checkpoint-9200",
  "epoch": 1.0014915832090348,
  "global_step": 9400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 6e-06,
      "loss": 68.9386,
      "step": 200
    },
    {
      "epoch": 0.04,
      "learning_rate": 1.2e-05,
      "loss": 29.743,
      "step": 400
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.8e-05,
      "loss": 22.6828,
      "step": 600
    },
    {
      "epoch": 0.09,
      "learning_rate": 2.4e-05,
      "loss": 18.3538,
      "step": 800
    },
    {
      "epoch": 0.11,
      "learning_rate": 3e-05,
      "loss": 16.8795,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 2.9285714285714284e-05,
      "loss": 15.0949,
      "step": 1200
    },
    {
      "epoch": 0.15,
      "learning_rate": 2.857142857142857e-05,
      "loss": 13.1123,
      "step": 1400
    },
    {
      "epoch": 0.17,
      "learning_rate": 2.7857142857142858e-05,
      "loss": 11.6076,
      "step": 1600
    },
    {
      "epoch": 0.19,
      "learning_rate": 2.7142857142857144e-05,
      "loss": 11.0653,
      "step": 1800
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.6428571428571428e-05,
      "loss": 10.2023,
      "step": 2000
    },
    {
      "epoch": 0.23,
      "learning_rate": 2.5714285714285714e-05,
      "loss": 9.4697,
      "step": 2200
    },
    {
      "epoch": 0.26,
      "learning_rate": 2.5e-05,
      "loss": 9.073,
      "step": 2400
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.4285714285714288e-05,
      "loss": 8.2219,
      "step": 2600
    },
    {
      "epoch": 0.3,
      "learning_rate": 2.357142857142857e-05,
      "loss": 8.1709,
      "step": 2800
    },
    {
      "epoch": 0.32,
      "learning_rate": 2.2857142857142858e-05,
      "loss": 7.6007,
      "step": 3000
    },
    {
      "epoch": 0.34,
      "learning_rate": 2.2142857142857145e-05,
      "loss": 7.3222,
      "step": 3200
    },
    {
      "epoch": 0.36,
      "learning_rate": 2.1428571428571428e-05,
      "loss": 6.8397,
      "step": 3400
    },
    {
      "epoch": 0.38,
      "learning_rate": 2.0714285714285715e-05,
      "loss": 6.7136,
      "step": 3600
    },
    {
      "epoch": 0.4,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 6.4772,
      "step": 3800
    },
    {
      "epoch": 0.43,
      "learning_rate": 1.928571428571429e-05,
      "loss": 6.136,
      "step": 4000
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.8571428571428572e-05,
      "loss": 6.3031,
      "step": 4200
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.785714285714286e-05,
      "loss": 5.8973,
      "step": 4400
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 5.849,
      "step": 4600
    },
    {
      "epoch": 0.51,
      "learning_rate": 1.6428571428571432e-05,
      "loss": 5.7724,
      "step": 4800
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.5714285714285715e-05,
      "loss": 5.5508,
      "step": 5000
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.5e-05,
      "loss": 5.1375,
      "step": 5200
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 5.3564,
      "step": 5400
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.3571428571428572e-05,
      "loss": 5.173,
      "step": 5600
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.2857142857142857e-05,
      "loss": 4.8572,
      "step": 5800
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.2142857142857144e-05,
      "loss": 5.0517,
      "step": 6000
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 4.8886,
      "step": 6200
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.0714285714285714e-05,
      "loss": 4.7235,
      "step": 6400
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.999999999999999e-06,
      "loss": 4.4131,
      "step": 6600
    },
    {
      "epoch": 0.72,
      "learning_rate": 9.285714285714286e-06,
      "loss": 4.6411,
      "step": 6800
    },
    {
      "epoch": 0.75,
      "learning_rate": 8.571428571428571e-06,
      "loss": 4.3489,
      "step": 7000
    },
    {
      "epoch": 0.77,
      "learning_rate": 7.857142857142858e-06,
      "loss": 4.4199,
      "step": 7200
    },
    {
      "epoch": 0.79,
      "learning_rate": 7.142857142857143e-06,
      "loss": 4.2557,
      "step": 7400
    },
    {
      "epoch": 0.81,
      "learning_rate": 6.428571428571429e-06,
      "loss": 3.9598,
      "step": 7600
    },
    {
      "epoch": 0.83,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 4.1423,
      "step": 7800
    },
    {
      "epoch": 0.85,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 3.9408,
      "step": 8000
    },
    {
      "epoch": 0.87,
      "learning_rate": 4.2857142857142855e-06,
      "loss": 4.1499,
      "step": 8200
    },
    {
      "epoch": 0.89,
      "learning_rate": 3.5714285714285714e-06,
      "loss": 4.0065,
      "step": 8400
    },
    {
      "epoch": 0.92,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 3.7546,
      "step": 8600
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.1428571428571427e-06,
      "loss": 3.8722,
      "step": 8800
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 3.7097,
      "step": 9000
    },
    {
      "epoch": 0.98,
      "learning_rate": 7.142857142857143e-07,
      "loss": 4.0694,
      "step": 9200
    },
    {
      "epoch": 1.0,
      "learning_rate": 0.0,
      "loss": 3.8382,
      "step": 9400
    },
    {
      "epoch": 1.0,
      "step": 9400,
      "total_flos": 622939976497152.0,
      "train_loss": 8.931647637549867,
      "train_runtime": 5062.6195,
      "train_samples_per_second": 237.664,
      "train_steps_per_second": 1.857
    }
  ],
  "max_steps": 9400,
  "num_train_epochs": 2,
  "total_flos": 622939976497152.0,
  "trial_name": null,
  "trial_params": null
}