|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.21238938053097345, |
|
"eval_steps": 40, |
|
"global_step": 240, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0008849557522123894, |
|
            "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.8564, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0017699115044247787, |
|
            "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 7.1716, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.002654867256637168, |
|
            "grad_norm": null,
|
"learning_rate": 0.0, |
|
"loss": 5.9095, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0035398230088495575, |
|
"grad_norm": 21.95326805114746, |
|
"learning_rate": 3.5377358490566036e-09, |
|
"loss": 5.0841, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.004424778761061947, |
|
"grad_norm": 16.607179641723633, |
|
"learning_rate": 7.075471698113207e-09, |
|
"loss": 4.0184, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.005309734513274336, |
|
"grad_norm": 33.789615631103516, |
|
"learning_rate": 1.0613207547169811e-08, |
|
"loss": 6.2191, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.006194690265486726, |
|
"grad_norm": 28.073551177978516, |
|
"learning_rate": 1.4150943396226414e-08, |
|
"loss": 5.6124, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.007079646017699115, |
|
"grad_norm": 17.365602493286133, |
|
"learning_rate": 1.768867924528302e-08, |
|
"loss": 3.9544, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.007964601769911504, |
|
"grad_norm": 19.384475708007812, |
|
"learning_rate": 2.1226415094339622e-08, |
|
"loss": 4.7149, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.008849557522123894, |
|
"grad_norm": 19.67770004272461, |
|
"learning_rate": 2.4764150943396227e-08, |
|
"loss": 4.9616, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009734513274336283, |
|
"grad_norm": 24.233421325683594, |
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 5.2794, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.010619469026548672, |
|
            "grad_norm": null,
|
"learning_rate": 2.830188679245283e-08, |
|
"loss": 8.8704, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.011504424778761062, |
|
"grad_norm": 34.37785720825195, |
|
"learning_rate": 3.183962264150943e-08, |
|
"loss": 6.0707, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.012389380530973451, |
|
"grad_norm": 25.11741065979004, |
|
"learning_rate": 3.537735849056604e-08, |
|
"loss": 5.4071, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01327433628318584, |
|
"grad_norm": 53.84364700317383, |
|
"learning_rate": 3.891509433962264e-08, |
|
"loss": 6.9104, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01415929203539823, |
|
"grad_norm": 32.0903434753418, |
|
"learning_rate": 4.2452830188679244e-08, |
|
"loss": 6.0276, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01504424778761062, |
|
"grad_norm": 39.742130279541016, |
|
"learning_rate": 4.599056603773585e-08, |
|
"loss": 6.737, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01592920353982301, |
|
"grad_norm": 45.267417907714844, |
|
"learning_rate": 4.9528301886792454e-08, |
|
"loss": 6.5354, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.016814159292035398, |
|
"grad_norm": 22.39731788635254, |
|
"learning_rate": 5.3066037735849055e-08, |
|
"loss": 5.206, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.017699115044247787, |
|
"grad_norm": 20.858232498168945, |
|
"learning_rate": 5.660377358490566e-08, |
|
"loss": 5.2469, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.018584070796460177, |
|
"grad_norm": 23.96446990966797, |
|
"learning_rate": 6.014150943396226e-08, |
|
"loss": 5.3771, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.019469026548672566, |
|
"grad_norm": 22.945741653442383, |
|
"learning_rate": 6.367924528301887e-08, |
|
"loss": 4.979, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.020353982300884955, |
|
"grad_norm": 15.497300148010254, |
|
"learning_rate": 6.721698113207547e-08, |
|
"loss": 4.7909, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.021238938053097345, |
|
"grad_norm": 20.039024353027344, |
|
"learning_rate": 7.075471698113208e-08, |
|
"loss": 4.9086, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.022123893805309734, |
|
"grad_norm": 21.30576515197754, |
|
"learning_rate": 7.429245283018869e-08, |
|
"loss": 4.8826, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.023008849557522124, |
|
"grad_norm": 64.5285873413086, |
|
"learning_rate": 7.783018867924529e-08, |
|
"loss": 8.2266, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.023893805309734513, |
|
"grad_norm": 59.894893646240234, |
|
"learning_rate": 8.13679245283019e-08, |
|
"loss": 8.3024, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.024778761061946902, |
|
"grad_norm": 25.504356384277344, |
|
"learning_rate": 8.490566037735849e-08, |
|
"loss": 5.8745, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02566371681415929, |
|
"grad_norm": 15.169568061828613, |
|
"learning_rate": 8.84433962264151e-08, |
|
"loss": 4.7298, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02654867256637168, |
|
"grad_norm": 24.09995460510254, |
|
"learning_rate": 9.19811320754717e-08, |
|
"loss": 5.4614, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02743362831858407, |
|
"grad_norm": 28.669275283813477, |
|
"learning_rate": 9.55188679245283e-08, |
|
"loss": 5.8594, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02831858407079646, |
|
"grad_norm": 23.37987518310547, |
|
"learning_rate": 9.905660377358491e-08, |
|
"loss": 5.2401, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02920353982300885, |
|
"grad_norm": 22.815292358398438, |
|
"learning_rate": 1.0259433962264152e-07, |
|
"loss": 5.1579, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03008849557522124, |
|
"grad_norm": 13.775344848632812, |
|
"learning_rate": 1.0613207547169811e-07, |
|
"loss": 5.2181, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.030973451327433628, |
|
"grad_norm": 18.642087936401367, |
|
"learning_rate": 1.0966981132075472e-07, |
|
"loss": 4.6328, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03185840707964602, |
|
"grad_norm": 18.041406631469727, |
|
"learning_rate": 1.1320754716981131e-07, |
|
"loss": 2.121, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03274336283185841, |
|
"grad_norm": 23.423933029174805, |
|
"learning_rate": 1.1674528301886792e-07, |
|
"loss": 5.9026, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.033628318584070796, |
|
"grad_norm": 46.25591278076172, |
|
"learning_rate": 1.2028301886792452e-07, |
|
"loss": 7.3796, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.034513274336283185, |
|
"grad_norm": 20.376422882080078, |
|
"learning_rate": 1.2382075471698114e-07, |
|
"loss": 5.5361, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"grad_norm": 12.82562255859375, |
|
"learning_rate": 1.2735849056603773e-07, |
|
"loss": 4.0243, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_Qnli-dev_cosine_accuracy": 0.5859375, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9302856922149658, |
|
"eval_Qnli-dev_cosine_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_cosine_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7634451389312744, |
|
"eval_Qnli-dev_cosine_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_cosine_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_dot_accuracy": 0.5859375, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_dot_ap": 0.548060663242546, |
|
"eval_Qnli-dev_dot_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_dot_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_dot_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_dot_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.5859375, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.348224639892578, |
|
"eval_Qnli-dev_euclidean_ap": 0.5480269179285036, |
|
"eval_Qnli-dev_euclidean_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.05518341064453, |
|
"eval_Qnli-dev_euclidean_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_euclidean_recall": 0.9915254237288136, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.59765625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 175.22628784179688, |
|
"eval_Qnli-dev_manhattan_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_manhattan_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 334.39178466796875, |
|
"eval_Qnli-dev_manhattan_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_manhattan_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_max_accuracy": 0.59765625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 714.4895629882812, |
|
"eval_Qnli-dev_max_ap": 0.5780924813828909, |
|
"eval_Qnli-dev_max_f1": 0.6315789473684211, |
|
"eval_Qnli-dev_max_f1_threshold": 586.342529296875, |
|
"eval_Qnli-dev_max_precision": 0.4633663366336634, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.6640625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9888672828674316, |
|
"eval_allNLI-dev_cosine_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_cosine_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7477295398712158, |
|
"eval_allNLI-dev_cosine_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.6640625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_dot_ap": 0.3288581611938815, |
|
"eval_allNLI-dev_dot_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_dot_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_dot_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.6640625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 3.8085508346557617, |
|
"eval_allNLI-dev_euclidean_ap": 0.32886365768247516, |
|
"eval_allNLI-dev_euclidean_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.684810638427734, |
|
"eval_allNLI-dev_euclidean_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 65.93238830566406, |
|
"eval_allNLI-dev_manhattan_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 335.4263916015625, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.6640625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 759.483154296875, |
|
"eval_allNLI-dev_max_ap": 0.33852594919898543, |
|
"eval_allNLI-dev_max_f1": 0.5095729013254787, |
|
"eval_allNLI-dev_max_f1_threshold": 574.2760620117188, |
|
"eval_allNLI-dev_max_precision": 0.34189723320158105, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5780924813828909, |
|
"eval_sts-test_pearson_cosine": 0.1533465318414369, |
|
"eval_sts-test_pearson_dot": 0.15333057450060855, |
|
"eval_sts-test_pearson_euclidean": 0.1664717893342273, |
|
"eval_sts-test_pearson_manhattan": 0.20717970064899288, |
|
"eval_sts-test_pearson_max": 0.20717970064899288, |
|
"eval_sts-test_spearman_cosine": 0.18786210334203038, |
|
"eval_sts-test_spearman_dot": 0.1878347337472397, |
|
"eval_sts-test_spearman_euclidean": 0.18786046572196458, |
|
"eval_sts-test_spearman_manhattan": 0.22429466463153608, |
|
"eval_sts-test_spearman_max": 0.22429466463153608, |
|
"eval_vitaminc-pairs_loss": 2.901831865310669, |
|
"eval_vitaminc-pairs_runtime": 4.078, |
|
"eval_vitaminc-pairs_samples_per_second": 31.388, |
|
"eval_vitaminc-pairs_steps_per_second": 0.245, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_negation-triplets_loss": 5.690315246582031, |
|
"eval_negation-triplets_runtime": 0.7141, |
|
"eval_negation-triplets_samples_per_second": 179.254, |
|
"eval_negation-triplets_steps_per_second": 1.4, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-pos_loss": 2.1135852336883545, |
|
"eval_scitail-pairs-pos_runtime": 0.8282, |
|
"eval_scitail-pairs-pos_samples_per_second": 154.543, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.207, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_scitail-pairs-qa_loss": 2.8052029609680176, |
|
"eval_scitail-pairs-qa_runtime": 0.5471, |
|
"eval_scitail-pairs-qa_samples_per_second": 233.943, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.828, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_xsum-pairs_loss": 6.583061695098877, |
|
"eval_xsum-pairs_runtime": 2.8921, |
|
"eval_xsum-pairs_samples_per_second": 44.259, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_sciq_pairs_loss": 0.8882207870483398, |
|
"eval_sciq_pairs_runtime": 3.7993, |
|
"eval_sciq_pairs_samples_per_second": 33.69, |
|
"eval_sciq_pairs_steps_per_second": 0.263, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_qasc_pairs_loss": 4.1147541999816895, |
|
"eval_qasc_pairs_runtime": 0.6768, |
|
"eval_qasc_pairs_samples_per_second": 189.125, |
|
"eval_qasc_pairs_steps_per_second": 1.478, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_openbookqa_pairs_loss": 5.096628665924072, |
|
"eval_openbookqa_pairs_runtime": 0.5776, |
|
"eval_openbookqa_pairs_samples_per_second": 221.615, |
|
"eval_openbookqa_pairs_steps_per_second": 1.731, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_msmarco_pairs_loss": 10.391141891479492, |
|
"eval_msmarco_pairs_runtime": 1.2577, |
|
"eval_msmarco_pairs_samples_per_second": 101.77, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_nq_pairs_loss": 10.903197288513184, |
|
"eval_nq_pairs_runtime": 2.5051, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_trivia_pairs_loss": 7.190384387969971, |
|
"eval_trivia_pairs_runtime": 3.6482, |
|
"eval_trivia_pairs_samples_per_second": 35.085, |
|
"eval_trivia_pairs_steps_per_second": 0.274, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_gooaq_pairs_loss": 8.193528175354004, |
|
"eval_gooaq_pairs_runtime": 0.9648, |
|
"eval_gooaq_pairs_samples_per_second": 132.67, |
|
"eval_gooaq_pairs_steps_per_second": 1.036, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_paws-pos_loss": 1.3942564725875854, |
|
"eval_paws-pos_runtime": 0.6718, |
|
"eval_paws-pos_samples_per_second": 190.538, |
|
"eval_paws-pos_steps_per_second": 1.489, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.035398230088495575, |
|
"eval_global_dataset_loss": 5.671571731567383, |
|
"eval_global_dataset_runtime": 23.0452, |
|
"eval_global_dataset_samples_per_second": 28.77, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.036283185840707964, |
|
"grad_norm": 18.026830673217773, |
|
"learning_rate": 1.3089622641509433e-07, |
|
"loss": 4.9072, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03716814159292035, |
|
"grad_norm": 15.423810958862305, |
|
"learning_rate": 1.3443396226415095e-07, |
|
"loss": 3.4439, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03805309734513274, |
|
"grad_norm": 16.31403160095215, |
|
"learning_rate": 1.3797169811320754e-07, |
|
"loss": 4.9787, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03893805309734513, |
|
"grad_norm": 21.37955093383789, |
|
"learning_rate": 1.4150943396226417e-07, |
|
"loss": 5.8318, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03982300884955752, |
|
"grad_norm": 18.23583984375, |
|
"learning_rate": 1.4504716981132076e-07, |
|
"loss": 5.3226, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.04070796460176991, |
|
"grad_norm": 20.878713607788086, |
|
"learning_rate": 1.4858490566037738e-07, |
|
"loss": 5.1181, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.0415929203539823, |
|
"grad_norm": 18.71149444580078, |
|
"learning_rate": 1.5212264150943398e-07, |
|
"loss": 4.7834, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04247787610619469, |
|
"grad_norm": 38.85902786254883, |
|
"learning_rate": 1.5566037735849057e-07, |
|
"loss": 6.6303, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.04336283185840708, |
|
"grad_norm": 37.41562271118164, |
|
"learning_rate": 1.591981132075472e-07, |
|
"loss": 5.8171, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.04424778761061947, |
|
"grad_norm": 17.541080474853516, |
|
"learning_rate": 1.627358490566038e-07, |
|
"loss": 5.1962, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04513274336283186, |
|
"grad_norm": 16.145116806030273, |
|
"learning_rate": 1.6627358490566038e-07, |
|
"loss": 5.2096, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04601769911504425, |
|
"grad_norm": 20.175189971923828, |
|
"learning_rate": 1.6981132075471698e-07, |
|
"loss": 5.0943, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.046902654867256637, |
|
"grad_norm": 13.441214561462402, |
|
"learning_rate": 1.733490566037736e-07, |
|
"loss": 4.9038, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.047787610619469026, |
|
"grad_norm": 13.396607398986816, |
|
"learning_rate": 1.768867924528302e-07, |
|
"loss": 4.6479, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.048672566371681415, |
|
"grad_norm": 13.68046760559082, |
|
"learning_rate": 1.804245283018868e-07, |
|
"loss": 5.5098, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.049557522123893805, |
|
"grad_norm": 13.278443336486816, |
|
"learning_rate": 1.839622641509434e-07, |
|
"loss": 4.6979, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.050442477876106194, |
|
"grad_norm": 15.295453071594238, |
|
"learning_rate": 1.875e-07, |
|
"loss": 3.1969, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.05132743362831858, |
|
"grad_norm": 12.185781478881836, |
|
"learning_rate": 1.910377358490566e-07, |
|
"loss": 4.4127, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.05221238938053097, |
|
"grad_norm": 10.874494552612305, |
|
"learning_rate": 1.9457547169811322e-07, |
|
"loss": 3.7746, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.05309734513274336, |
|
"grad_norm": 9.654823303222656, |
|
"learning_rate": 1.9811320754716982e-07, |
|
"loss": 4.5378, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.05398230088495575, |
|
"grad_norm": 21.123645782470703, |
|
"learning_rate": 2.016509433962264e-07, |
|
"loss": 5.0209, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.05486725663716814, |
|
"grad_norm": 33.47934341430664, |
|
"learning_rate": 2.0518867924528303e-07, |
|
"loss": 6.5936, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.05575221238938053, |
|
"grad_norm": 10.2566556930542, |
|
"learning_rate": 2.0872641509433963e-07, |
|
"loss": 4.2315, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.05663716814159292, |
|
"grad_norm": 28.198625564575195, |
|
"learning_rate": 2.1226415094339622e-07, |
|
"loss": 6.4269, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05752212389380531, |
|
"grad_norm": 9.386558532714844, |
|
"learning_rate": 2.1580188679245282e-07, |
|
"loss": 4.2644, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0584070796460177, |
|
"grad_norm": 12.687555313110352, |
|
"learning_rate": 2.1933962264150944e-07, |
|
"loss": 5.1388, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05929203539823009, |
|
"grad_norm": 14.834878921508789, |
|
"learning_rate": 2.2287735849056603e-07, |
|
"loss": 5.1852, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.06017699115044248, |
|
"grad_norm": 10.888677597045898, |
|
"learning_rate": 2.2641509433962263e-07, |
|
"loss": 4.8057, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.061061946902654866, |
|
"grad_norm": 13.97256851196289, |
|
"learning_rate": 2.2995283018867925e-07, |
|
"loss": 3.1725, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.061946902654867256, |
|
"grad_norm": 11.82534122467041, |
|
"learning_rate": 2.3349056603773584e-07, |
|
"loss": 3.3322, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.06283185840707965, |
|
"grad_norm": 16.99266242980957, |
|
"learning_rate": 2.3702830188679244e-07, |
|
"loss": 5.139, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.06371681415929203, |
|
"grad_norm": 8.74513053894043, |
|
"learning_rate": 2.4056603773584903e-07, |
|
"loss": 4.307, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.06460176991150443, |
|
"grad_norm": 11.715869903564453, |
|
"learning_rate": 2.4410377358490563e-07, |
|
"loss": 5.0133, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.06548672566371681, |
|
"grad_norm": 9.844196319580078, |
|
"learning_rate": 2.476415094339623e-07, |
|
"loss": 4.0507, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.06637168141592921, |
|
"grad_norm": 12.447444915771484, |
|
"learning_rate": 2.5117924528301887e-07, |
|
"loss": 3.3895, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.06725663716814159, |
|
"grad_norm": 23.91596794128418, |
|
"learning_rate": 2.5471698113207547e-07, |
|
"loss": 5.6736, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.06814159292035399, |
|
"grad_norm": 9.635603904724121, |
|
"learning_rate": 2.5825471698113206e-07, |
|
"loss": 4.2572, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.06902654867256637, |
|
"grad_norm": 14.971665382385254, |
|
"learning_rate": 2.6179245283018866e-07, |
|
"loss": 3.0796, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06991150442477877, |
|
"grad_norm": 11.226128578186035, |
|
"learning_rate": 2.6533018867924525e-07, |
|
"loss": 5.0199, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"grad_norm": 11.01388931274414, |
|
"learning_rate": 2.688679245283019e-07, |
|
"loss": 4.1414, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_Qnli-dev_cosine_accuracy": 0.591796875, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9258557558059692, |
|
"eval_Qnli-dev_cosine_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_cosine_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.750666618347168, |
|
"eval_Qnli-dev_cosine_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_cosine_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_dot_accuracy": 0.591796875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_dot_ap": 0.5585297234749824, |
|
"eval_Qnli-dev_dot_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_dot_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_dot_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_dot_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.591796875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.672666549682617, |
|
"eval_Qnli-dev_euclidean_ap": 0.5585355274462735, |
|
"eval_Qnli-dev_euclidean_f1": 0.6291834002677376, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.553747177124023, |
|
"eval_Qnli-dev_euclidean_precision": 0.4598825831702544, |
|
"eval_Qnli-dev_euclidean_recall": 0.9957627118644068, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.619140625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 188.09068298339844, |
|
"eval_Qnli-dev_manhattan_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_manhattan_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 237.80462646484375, |
|
"eval_Qnli-dev_manhattan_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_manhattan_recall": 0.902542372881356, |
|
"eval_Qnli-dev_max_accuracy": 0.619140625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 711.18359375, |
|
"eval_Qnli-dev_max_ap": 0.5898283705050701, |
|
"eval_Qnli-dev_max_f1": 0.6301775147928994, |
|
"eval_Qnli-dev_max_f1_threshold": 576.5970458984375, |
|
"eval_Qnli-dev_max_precision": 0.48409090909090907, |
|
"eval_Qnli-dev_max_recall": 0.9957627118644068, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.983686089515686, |
|
"eval_allNLI-dev_cosine_ap": 0.34411819659341086, |
|
"eval_allNLI-dev_cosine_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7642872333526611, |
|
"eval_allNLI-dev_cosine_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_cosine_recall": 1.0, |
|
"eval_allNLI-dev_dot_accuracy": 0.666015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_dot_ap": 0.344109544232086, |
|
"eval_allNLI-dev_dot_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_dot_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_dot_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_dot_recall": 1.0, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.00581693649292, |
|
"eval_allNLI-dev_euclidean_ap": 0.3441246898925644, |
|
"eval_allNLI-dev_euclidean_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 19.022436141967773, |
|
"eval_allNLI-dev_euclidean_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_euclidean_recall": 1.0, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 62.69102096557617, |
|
"eval_allNLI-dev_manhattan_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_manhattan_f1": 0.5058479532163743, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 337.6861877441406, |
|
"eval_allNLI-dev_manhattan_precision": 0.3385518590998043, |
|
"eval_allNLI-dev_manhattan_recall": 1.0, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 755.60302734375, |
|
"eval_allNLI-dev_max_ap": 0.35131239981425566, |
|
"eval_allNLI-dev_max_f1": 0.5065885797950219, |
|
"eval_allNLI-dev_max_f1_threshold": 587.0625, |
|
"eval_allNLI-dev_max_precision": 0.3392156862745098, |
|
"eval_allNLI-dev_max_recall": 1.0, |
|
"eval_sequential_score": 0.5898283705050701, |
|
"eval_sts-test_pearson_cosine": 0.22248205020578934, |
|
"eval_sts-test_pearson_dot": 0.22239084967931927, |
|
"eval_sts-test_pearson_euclidean": 0.2323160413842197, |
|
"eval_sts-test_pearson_manhattan": 0.26632593273308647, |
|
"eval_sts-test_pearson_max": 0.26632593273308647, |
|
"eval_sts-test_spearman_cosine": 0.24802235964390085, |
|
"eval_sts-test_spearman_dot": 0.24791612015173234, |
|
"eval_sts-test_spearman_euclidean": 0.24799036249272113, |
|
"eval_sts-test_spearman_manhattan": 0.2843623073856928, |
|
"eval_sts-test_spearman_max": 0.2843623073856928, |
|
"eval_vitaminc-pairs_loss": 2.7793872356414795, |
|
"eval_vitaminc-pairs_runtime": 3.7649, |
|
"eval_vitaminc-pairs_samples_per_second": 33.998, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_negation-triplets_loss": 4.888970851898193, |
|
"eval_negation-triplets_runtime": 0.7134, |
|
"eval_negation-triplets_samples_per_second": 179.432, |
|
"eval_negation-triplets_steps_per_second": 1.402, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-pos_loss": 1.8996644020080566, |
|
"eval_scitail-pairs-pos_runtime": 0.8506, |
|
"eval_scitail-pairs-pos_samples_per_second": 150.477, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.176, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_scitail-pairs-qa_loss": 2.6760551929473877, |
|
"eval_scitail-pairs-qa_runtime": 0.5685, |
|
"eval_scitail-pairs-qa_samples_per_second": 225.171, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.759, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_xsum-pairs_loss": 6.209648609161377, |
|
"eval_xsum-pairs_runtime": 2.9221, |
|
"eval_xsum-pairs_samples_per_second": 43.804, |
|
"eval_xsum-pairs_steps_per_second": 0.342, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_sciq_pairs_loss": 0.7622462511062622, |
|
"eval_sciq_pairs_runtime": 3.7816, |
|
"eval_sciq_pairs_samples_per_second": 33.848, |
|
"eval_sciq_pairs_steps_per_second": 0.264, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_qasc_pairs_loss": 3.3129472732543945, |
|
"eval_qasc_pairs_runtime": 0.6761, |
|
"eval_qasc_pairs_samples_per_second": 189.334, |
|
"eval_qasc_pairs_steps_per_second": 1.479, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_openbookqa_pairs_loss": 4.549765586853027, |
|
"eval_openbookqa_pairs_runtime": 0.5767, |
|
"eval_openbookqa_pairs_samples_per_second": 221.954, |
|
"eval_openbookqa_pairs_steps_per_second": 1.734, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_msmarco_pairs_loss": 7.205582141876221, |
|
"eval_msmarco_pairs_runtime": 1.2621, |
|
"eval_msmarco_pairs_samples_per_second": 101.416, |
|
"eval_msmarco_pairs_steps_per_second": 0.792, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_nq_pairs_loss": 7.680945873260498, |
|
"eval_nq_pairs_runtime": 2.5052, |
|
"eval_nq_pairs_samples_per_second": 51.095, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_trivia_pairs_loss": 6.37924861907959, |
|
"eval_trivia_pairs_runtime": 3.6293, |
|
"eval_trivia_pairs_samples_per_second": 35.268, |
|
"eval_trivia_pairs_steps_per_second": 0.276, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_gooaq_pairs_loss": 6.656675338745117, |
|
"eval_gooaq_pairs_runtime": 0.9698, |
|
"eval_gooaq_pairs_samples_per_second": 131.988, |
|
"eval_gooaq_pairs_steps_per_second": 1.031, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_paws-pos_loss": 1.3848179578781128, |
|
"eval_paws-pos_runtime": 0.6727, |
|
"eval_paws-pos_samples_per_second": 190.278, |
|
"eval_paws-pos_steps_per_second": 1.487, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07079646017699115, |
|
"eval_global_dataset_loss": 5.002967834472656, |
|
"eval_global_dataset_runtime": 23.048, |
|
"eval_global_dataset_samples_per_second": 28.766, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.07168141592920355, |
|
"grad_norm": 18.9890193939209, |
|
"learning_rate": 2.724056603773585e-07, |
|
"loss": 5.8604, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.07256637168141593, |
|
"grad_norm": 8.206193923950195, |
|
"learning_rate": 2.759433962264151e-07, |
|
"loss": 4.3003, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.07345132743362832, |
|
"grad_norm": 10.03178882598877, |
|
"learning_rate": 2.794811320754717e-07, |
|
"loss": 4.4568, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.0743362831858407, |
|
"grad_norm": 14.74673080444336, |
|
"learning_rate": 2.8301886792452833e-07, |
|
"loss": 4.2747, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.0752212389380531, |
|
"grad_norm": 19.097232818603516, |
|
"learning_rate": 2.865566037735849e-07, |
|
"loss": 5.52, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.07610619469026549, |
|
"grad_norm": 14.828218460083008, |
|
"learning_rate": 2.900943396226415e-07, |
|
"loss": 2.7767, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.07699115044247788, |
|
"grad_norm": 9.30789566040039, |
|
"learning_rate": 2.936320754716981e-07, |
|
"loss": 4.397, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.07787610619469026, |
|
"grad_norm": 15.119461059570312, |
|
"learning_rate": 2.9716981132075476e-07, |
|
"loss": 5.4449, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.07876106194690266, |
|
"grad_norm": 8.459301948547363, |
|
"learning_rate": 3.0070754716981136e-07, |
|
"loss": 4.2706, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.07964601769911504, |
|
"grad_norm": 23.59125518798828, |
|
"learning_rate": 3.0424528301886795e-07, |
|
"loss": 6.4759, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.08053097345132744, |
|
"grad_norm": 8.729449272155762, |
|
"learning_rate": 3.0778301886792455e-07, |
|
"loss": 4.1951, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.08141592920353982, |
|
"grad_norm": 8.37271785736084, |
|
"learning_rate": 3.1132075471698114e-07, |
|
"loss": 4.6328, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.08230088495575222, |
|
"grad_norm": 10.029474258422852, |
|
"learning_rate": 3.1485849056603774e-07, |
|
"loss": 4.1278, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.0831858407079646, |
|
"grad_norm": 8.706567764282227, |
|
"learning_rate": 3.183962264150944e-07, |
|
"loss": 4.1787, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.084070796460177, |
|
"grad_norm": 13.88837718963623, |
|
"learning_rate": 3.21933962264151e-07, |
|
"loss": 5.2156, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.08495575221238938, |
|
"grad_norm": 12.01068115234375, |
|
"learning_rate": 3.254716981132076e-07, |
|
"loss": 3.1403, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.08584070796460178, |
|
"grad_norm": 8.432968139648438, |
|
"learning_rate": 3.2900943396226417e-07, |
|
"loss": 4.0273, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.08672566371681416, |
|
"grad_norm": 12.645098686218262, |
|
"learning_rate": 3.3254716981132077e-07, |
|
"loss": 3.0624, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.08761061946902655, |
|
"grad_norm": 11.483688354492188, |
|
"learning_rate": 3.3608490566037736e-07, |
|
"loss": 4.6786, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.08849557522123894, |
|
"grad_norm": 8.645537376403809, |
|
"learning_rate": 3.3962264150943395e-07, |
|
"loss": 4.1505, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.08938053097345133, |
|
"grad_norm": 13.053335189819336, |
|
"learning_rate": 3.431603773584906e-07, |
|
"loss": 2.9529, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.09026548672566372, |
|
"grad_norm": 14.494400978088379, |
|
"learning_rate": 3.466981132075472e-07, |
|
"loss": 4.7048, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.09115044247787611, |
|
"grad_norm": 9.513616561889648, |
|
"learning_rate": 3.502358490566038e-07, |
|
"loss": 4.7388, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.0920353982300885, |
|
"grad_norm": 9.751347541809082, |
|
"learning_rate": 3.537735849056604e-07, |
|
"loss": 3.7879, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.09292035398230089, |
|
"grad_norm": 9.06558895111084, |
|
"learning_rate": 3.57311320754717e-07, |
|
"loss": 4.0311, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.09380530973451327, |
|
"grad_norm": 9.53257942199707, |
|
"learning_rate": 3.608490566037736e-07, |
|
"loss": 4.1314, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.09469026548672567, |
|
"grad_norm": 11.554676055908203, |
|
"learning_rate": 3.643867924528302e-07, |
|
"loss": 4.9411, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.09557522123893805, |
|
"grad_norm": 8.559597969055176, |
|
"learning_rate": 3.679245283018868e-07, |
|
"loss": 4.1118, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.09646017699115045, |
|
"grad_norm": 10.008039474487305, |
|
"learning_rate": 3.714622641509434e-07, |
|
"loss": 3.6971, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.09734513274336283, |
|
"grad_norm": 16.543254852294922, |
|
"learning_rate": 3.75e-07, |
|
"loss": 5.605, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.09823008849557523, |
|
"grad_norm": 11.816540718078613, |
|
"learning_rate": 3.7853773584905666e-07, |
|
"loss": 3.4563, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.09911504424778761, |
|
"grad_norm": 10.638028144836426, |
|
"learning_rate": 3.820754716981132e-07, |
|
"loss": 3.7422, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 8.5276460647583, |
|
"learning_rate": 3.8561320754716985e-07, |
|
"loss": 3.8055, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.10088495575221239, |
|
"grad_norm": 13.437420845031738, |
|
"learning_rate": 3.8915094339622644e-07, |
|
"loss": 5.2369, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.10176991150442478, |
|
"grad_norm": 21.039424896240234, |
|
"learning_rate": 3.926886792452831e-07, |
|
"loss": 5.6518, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.10265486725663717, |
|
"grad_norm": 13.487382888793945, |
|
"learning_rate": 3.9622641509433963e-07, |
|
"loss": 3.2906, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.10353982300884956, |
|
"grad_norm": 11.895822525024414, |
|
"learning_rate": 3.997641509433963e-07, |
|
"loss": 3.4996, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.10442477876106195, |
|
"grad_norm": 10.83902359008789, |
|
"learning_rate": 4.033018867924528e-07, |
|
"loss": 3.6283, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.10530973451327434, |
|
"grad_norm": 10.552660942077637, |
|
"learning_rate": 4.0683962264150947e-07, |
|
"loss": 4.1487, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"grad_norm": 9.924088478088379, |
|
"learning_rate": 4.1037735849056606e-07, |
|
"loss": 4.3996, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_Qnli-dev_cosine_accuracy": 0.595703125, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9275249242782593, |
|
"eval_Qnli-dev_cosine_ap": 0.5645920090286662, |
|
"eval_Qnli-dev_cosine_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7267085313796997, |
|
"eval_Qnli-dev_cosine_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_cosine_recall": 1.0, |
|
"eval_Qnli-dev_dot_accuracy": 0.595703125, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 712.4608154296875, |
|
"eval_Qnli-dev_dot_ap": 0.5646837736357366, |
|
"eval_Qnli-dev_dot_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_dot_f1_threshold": 558.2177734375, |
|
"eval_Qnli-dev_dot_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_dot_recall": 1.0, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.595703125, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 10.551876068115234, |
|
"eval_Qnli-dev_euclidean_ap": 0.5645997569733668, |
|
"eval_Qnli-dev_euclidean_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 20.490163803100586, |
|
"eval_Qnli-dev_euclidean_precision": 0.4627450980392157, |
|
"eval_Qnli-dev_euclidean_recall": 1.0, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.626953125, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 195.12744140625, |
|
"eval_Qnli-dev_manhattan_ap": 0.5975206086733145, |
|
"eval_Qnli-dev_manhattan_f1": 0.6322008862629247, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 256.6172180175781, |
|
"eval_Qnli-dev_manhattan_precision": 0.4852607709750567, |
|
"eval_Qnli-dev_manhattan_recall": 0.9067796610169492, |
|
"eval_Qnli-dev_max_accuracy": 0.626953125, |
|
"eval_Qnli-dev_max_accuracy_threshold": 712.4608154296875, |
|
"eval_Qnli-dev_max_ap": 0.5975206086733145, |
|
"eval_Qnli-dev_max_f1": 0.6327077747989276, |
|
"eval_Qnli-dev_max_f1_threshold": 558.2177734375, |
|
"eval_Qnli-dev_max_precision": 0.4852607709750567, |
|
"eval_Qnli-dev_max_recall": 1.0, |
|
"eval_allNLI-dev_cosine_accuracy": 0.666015625, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.983871340751648, |
|
"eval_allNLI-dev_cosine_ap": 0.36035507065342104, |
|
"eval_allNLI-dev_cosine_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.7787582874298096, |
|
"eval_allNLI-dev_cosine_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_cosine_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_dot_accuracy": 0.666015625, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 755.7670288085938, |
|
"eval_allNLI-dev_dot_ap": 0.36031241443166284, |
|
"eval_allNLI-dev_dot_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_dot_f1_threshold": 598.2041625976562, |
|
"eval_allNLI-dev_dot_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_dot_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.666015625, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 4.964720249176025, |
|
"eval_allNLI-dev_euclidean_ap": 0.36035507065342104, |
|
"eval_allNLI-dev_euclidean_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 18.434789657592773, |
|
"eval_allNLI-dev_euclidean_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_euclidean_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 66.59053039550781, |
|
"eval_allNLI-dev_manhattan_ap": 0.3692975841596879, |
|
"eval_allNLI-dev_manhattan_f1": 0.5029239766081871, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 380.123779296875, |
|
"eval_allNLI-dev_manhattan_precision": 0.33659491193737767, |
|
"eval_allNLI-dev_manhattan_recall": 0.9942196531791907, |
|
"eval_allNLI-dev_max_accuracy": 0.666015625, |
|
"eval_allNLI-dev_max_accuracy_threshold": 755.7670288085938, |
|
"eval_allNLI-dev_max_ap": 0.3692975841596879, |
|
"eval_allNLI-dev_max_f1": 0.5051395007342143, |
|
"eval_allNLI-dev_max_f1_threshold": 598.2041625976562, |
|
"eval_allNLI-dev_max_precision": 0.33858267716535434, |
|
"eval_allNLI-dev_max_recall": 0.9942196531791907, |
|
"eval_sequential_score": 0.5975206086733145, |
|
"eval_sts-test_pearson_cosine": 0.2980667522290251, |
|
"eval_sts-test_pearson_dot": 0.29795063801865274, |
|
"eval_sts-test_pearson_euclidean": 0.30279956330153407, |
|
"eval_sts-test_pearson_manhattan": 0.32939035635624725, |
|
"eval_sts-test_pearson_max": 0.32939035635624725, |
|
"eval_sts-test_spearman_cosine": 0.3148821747085771, |
|
"eval_sts-test_spearman_dot": 0.3149517475826025, |
|
"eval_sts-test_spearman_euclidean": 0.31489636085812106, |
|
"eval_sts-test_spearman_manhattan": 0.34558301612848313, |
|
"eval_sts-test_spearman_max": 0.34558301612848313, |
|
"eval_vitaminc-pairs_loss": 2.727938652038574, |
|
"eval_vitaminc-pairs_runtime": 3.7459, |
|
"eval_vitaminc-pairs_samples_per_second": 34.17, |
|
"eval_vitaminc-pairs_steps_per_second": 0.267, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_negation-triplets_loss": 4.394620418548584, |
|
"eval_negation-triplets_runtime": 0.7078, |
|
"eval_negation-triplets_samples_per_second": 180.852, |
|
"eval_negation-triplets_steps_per_second": 1.413, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_scitail-pairs-pos_loss": 1.4130322933197021, |
|
"eval_scitail-pairs-pos_runtime": 0.8587, |
|
"eval_scitail-pairs-pos_samples_per_second": 149.07, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.165, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_scitail-pairs-qa_loss": 2.1150403022766113, |
|
"eval_scitail-pairs-qa_runtime": 0.549, |
|
"eval_scitail-pairs-qa_samples_per_second": 233.163, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.822, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_xsum-pairs_loss": 6.048598289489746, |
|
"eval_xsum-pairs_runtime": 2.9142, |
|
"eval_xsum-pairs_samples_per_second": 43.923, |
|
"eval_xsum-pairs_steps_per_second": 0.343, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_sciq_pairs_loss": 0.7171850800514221, |
|
"eval_sciq_pairs_runtime": 3.7786, |
|
"eval_sciq_pairs_samples_per_second": 33.875, |
|
"eval_sciq_pairs_steps_per_second": 0.265, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_qasc_pairs_loss": 2.96693754196167, |
|
"eval_qasc_pairs_runtime": 0.6718, |
|
"eval_qasc_pairs_samples_per_second": 190.538, |
|
"eval_qasc_pairs_steps_per_second": 1.489, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_openbookqa_pairs_loss": 4.418018341064453, |
|
"eval_openbookqa_pairs_runtime": 0.577, |
|
"eval_openbookqa_pairs_samples_per_second": 221.852, |
|
"eval_openbookqa_pairs_steps_per_second": 1.733, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_msmarco_pairs_loss": 6.302182197570801, |
|
"eval_msmarco_pairs_runtime": 1.2547, |
|
"eval_msmarco_pairs_samples_per_second": 102.016, |
|
"eval_msmarco_pairs_steps_per_second": 0.797, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_nq_pairs_loss": 6.841231822967529, |
|
"eval_nq_pairs_runtime": 2.5052, |
|
"eval_nq_pairs_samples_per_second": 51.094, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_trivia_pairs_loss": 6.201311111450195, |
|
"eval_trivia_pairs_runtime": 3.6311, |
|
"eval_trivia_pairs_samples_per_second": 35.251, |
|
"eval_trivia_pairs_steps_per_second": 0.275, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_gooaq_pairs_loss": 6.098212718963623, |
|
"eval_gooaq_pairs_runtime": 0.9643, |
|
"eval_gooaq_pairs_samples_per_second": 132.741, |
|
"eval_gooaq_pairs_steps_per_second": 1.037, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_paws-pos_loss": 0.9473956823348999, |
|
"eval_paws-pos_runtime": 0.6684, |
|
"eval_paws-pos_samples_per_second": 191.51, |
|
"eval_paws-pos_steps_per_second": 1.496, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10619469026548672, |
|
"eval_global_dataset_loss": 4.385201454162598, |
|
"eval_global_dataset_runtime": 23.0455, |
|
"eval_global_dataset_samples_per_second": 28.769, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.10707964601769912, |
|
"grad_norm": 12.284002304077148, |
|
"learning_rate": 4.1391509433962266e-07, |
|
"loss": 3.5291, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.1079646017699115, |
|
"grad_norm": 10.567977905273438, |
|
"learning_rate": 4.1745283018867925e-07, |
|
"loss": 3.8232, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.1088495575221239, |
|
"grad_norm": 11.508279800415039, |
|
"learning_rate": 4.209905660377359e-07, |
|
"loss": 4.6035, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.10973451327433628, |
|
"grad_norm": 10.180809020996094, |
|
"learning_rate": 4.2452830188679244e-07, |
|
"loss": 3.7607, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.11061946902654868, |
|
"grad_norm": 9.519749641418457, |
|
"learning_rate": 4.280660377358491e-07, |
|
"loss": 3.8461, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.11150442477876106, |
|
"grad_norm": 11.971588134765625, |
|
"learning_rate": 4.3160377358490563e-07, |
|
"loss": 3.3413, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.11238938053097346, |
|
"grad_norm": 9.211153984069824, |
|
"learning_rate": 4.351415094339623e-07, |
|
"loss": 4.2777, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.11327433628318584, |
|
"grad_norm": 12.393014907836914, |
|
"learning_rate": 4.386792452830189e-07, |
|
"loss": 4.3597, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.11415929203539824, |
|
"grad_norm": 14.332024574279785, |
|
"learning_rate": 4.422169811320755e-07, |
|
"loss": 3.9046, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.11504424778761062, |
|
"grad_norm": 10.091246604919434, |
|
"learning_rate": 4.4575471698113207e-07, |
|
"loss": 4.0527, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.11592920353982301, |
|
"grad_norm": 15.043377876281738, |
|
"learning_rate": 4.492924528301887e-07, |
|
"loss": 5.0883, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1168141592920354, |
|
"grad_norm": 12.942100524902344, |
|
"learning_rate": 4.5283018867924526e-07, |
|
"loss": 3.8308, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.11769911504424779, |
|
"grad_norm": 11.961737632751465, |
|
"learning_rate": 4.563679245283019e-07, |
|
"loss": 3.572, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.11858407079646018, |
|
"grad_norm": 12.325026512145996, |
|
"learning_rate": 4.599056603773585e-07, |
|
"loss": 3.4299, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.11946902654867257, |
|
"grad_norm": 12.118773460388184, |
|
"learning_rate": 4.6344339622641515e-07, |
|
"loss": 4.1541, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.12035398230088495, |
|
"grad_norm": 11.99026107788086, |
|
"learning_rate": 4.669811320754717e-07, |
|
"loss": 3.584, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.12123893805309735, |
|
"grad_norm": 15.083515167236328, |
|
"learning_rate": 4.7051886792452834e-07, |
|
"loss": 5.0977, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.12212389380530973, |
|
"grad_norm": 15.059394836425781, |
|
"learning_rate": 4.740566037735849e-07, |
|
"loss": 4.6769, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.12300884955752213, |
|
"grad_norm": 8.864882469177246, |
|
"learning_rate": 4.775943396226415e-07, |
|
"loss": 3.8396, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.12389380530973451, |
|
"grad_norm": 12.116555213928223, |
|
"learning_rate": 4.811320754716981e-07, |
|
"loss": 3.2875, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.12477876106194691, |
|
"grad_norm": 14.214646339416504, |
|
"learning_rate": 4.846698113207547e-07, |
|
"loss": 4.1946, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1256637168141593, |
|
"grad_norm": 16.207908630371094, |
|
"learning_rate": 4.882075471698113e-07, |
|
"loss": 4.9602, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.12654867256637167, |
|
"grad_norm": 11.662668228149414, |
|
"learning_rate": 4.917452830188679e-07, |
|
"loss": 4.1531, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.12743362831858407, |
|
"grad_norm": 12.429448127746582, |
|
"learning_rate": 4.952830188679246e-07, |
|
"loss": 3.8351, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.12831858407079647, |
|
"grad_norm": 11.522616386413574, |
|
"learning_rate": 4.988207547169812e-07, |
|
"loss": 3.112, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.12920353982300886, |
|
"grad_norm": 14.556803703308105, |
|
"learning_rate": 5.023584905660377e-07, |
|
"loss": 2.3145, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.13008849557522123, |
|
"grad_norm": 12.348714828491211, |
|
"learning_rate": 5.058962264150944e-07, |
|
"loss": 4.0989, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.13097345132743363, |
|
"grad_norm": 13.150403022766113, |
|
"learning_rate": 5.094339622641509e-07, |
|
"loss": 3.2173, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.13185840707964602, |
|
"grad_norm": 12.066205978393555, |
|
"learning_rate": 5.129716981132076e-07, |
|
"loss": 2.7913, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.13274336283185842, |
|
"grad_norm": 11.519116401672363, |
|
"learning_rate": 5.165094339622641e-07, |
|
"loss": 3.7627, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1336283185840708, |
|
"grad_norm": 12.59196662902832, |
|
"learning_rate": 5.200471698113208e-07, |
|
"loss": 3.3669, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.13451327433628318, |
|
"grad_norm": 13.791536331176758, |
|
"learning_rate": 5.235849056603773e-07, |
|
"loss": 2.6775, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.13539823008849558, |
|
"grad_norm": 11.906597137451172, |
|
"learning_rate": 5.27122641509434e-07, |
|
"loss": 3.2804, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.13628318584070798, |
|
"grad_norm": 11.267363548278809, |
|
"learning_rate": 5.306603773584905e-07, |
|
"loss": 3.0676, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.13716814159292035, |
|
"grad_norm": 12.373686790466309, |
|
"learning_rate": 5.341981132075471e-07, |
|
"loss": 3.1559, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.13805309734513274, |
|
"grad_norm": 13.258451461791992, |
|
"learning_rate": 5.377358490566038e-07, |
|
"loss": 2.6638, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.13893805309734514, |
|
"grad_norm": 12.79727554321289, |
|
"learning_rate": 5.412735849056604e-07, |
|
"loss": 2.8045, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.13982300884955753, |
|
"grad_norm": 13.88683032989502, |
|
"learning_rate": 5.44811320754717e-07, |
|
"loss": 4.0568, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.1407079646017699, |
|
"grad_norm": 12.57358169555664, |
|
"learning_rate": 5.483490566037736e-07, |
|
"loss": 2.7554, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"grad_norm": 14.520818710327148, |
|
"learning_rate": 5.518867924528302e-07, |
|
"loss": 3.7407, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_Qnli-dev_cosine_accuracy": 0.62890625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.9045097827911377, |
|
"eval_Qnli-dev_cosine_ap": 0.6193527955003784, |
|
"eval_Qnli-dev_cosine_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.8351442813873291, |
|
"eval_Qnli-dev_cosine_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_cosine_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_dot_accuracy": 0.62890625, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 694.7778930664062, |
|
"eval_Qnli-dev_dot_ap": 0.6194150916988216, |
|
"eval_Qnli-dev_dot_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_dot_f1_threshold": 641.4969482421875, |
|
"eval_Qnli-dev_dot_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_dot_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.62890625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 12.111844062805176, |
|
"eval_Qnli-dev_euclidean_ap": 0.6193576186776235, |
|
"eval_Qnli-dev_euclidean_f1": 0.6397415185783522, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 15.914146423339844, |
|
"eval_Qnli-dev_euclidean_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_euclidean_recall": 0.8389830508474576, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.646484375, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 245.2164306640625, |
|
"eval_Qnli-dev_manhattan_ap": 0.6417015148414534, |
|
"eval_Qnli-dev_manhattan_f1": 0.6521060842433698, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 303.317626953125, |
|
"eval_Qnli-dev_manhattan_precision": 0.5160493827160494, |
|
"eval_Qnli-dev_manhattan_recall": 0.885593220338983, |
|
"eval_Qnli-dev_max_accuracy": 0.646484375, |
|
"eval_Qnli-dev_max_accuracy_threshold": 694.7778930664062, |
|
"eval_Qnli-dev_max_ap": 0.6417015148414534, |
|
"eval_Qnli-dev_max_f1": 0.6521060842433698, |
|
"eval_Qnli-dev_max_f1_threshold": 641.4969482421875, |
|
"eval_Qnli-dev_max_precision": 0.5169712793733682, |
|
"eval_Qnli-dev_max_recall": 0.885593220338983, |
|
"eval_allNLI-dev_cosine_accuracy": 0.66796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9767438173294067, |
|
"eval_allNLI-dev_cosine_ap": 0.38624833037583434, |
|
"eval_allNLI-dev_cosine_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8540960550308228, |
|
"eval_allNLI-dev_cosine_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_cosine_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 750.345458984375, |
|
"eval_allNLI-dev_dot_ap": 0.3862261253421553, |
|
"eval_allNLI-dev_dot_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_dot_f1_threshold": 656.0940551757812, |
|
"eval_allNLI-dev_dot_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_dot_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.66796875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.977196216583252, |
|
"eval_allNLI-dev_euclidean_ap": 0.38624380046547035, |
|
"eval_allNLI-dev_euclidean_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 14.971920013427734, |
|
"eval_allNLI-dev_euclidean_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_euclidean_recall": 0.8092485549132948, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.6640625, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 78.52637481689453, |
|
"eval_allNLI-dev_manhattan_ap": 0.3898187083180651, |
|
"eval_allNLI-dev_manhattan_f1": 0.5062388591800357, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 285.7745361328125, |
|
"eval_allNLI-dev_manhattan_precision": 0.36597938144329895, |
|
"eval_allNLI-dev_manhattan_recall": 0.8208092485549133, |
|
"eval_allNLI-dev_max_accuracy": 0.66796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 750.345458984375, |
|
"eval_allNLI-dev_max_ap": 0.3898187083180651, |
|
"eval_allNLI-dev_max_f1": 0.5100182149362477, |
|
"eval_allNLI-dev_max_f1_threshold": 656.0940551757812, |
|
"eval_allNLI-dev_max_precision": 0.3723404255319149, |
|
"eval_allNLI-dev_max_recall": 0.8208092485549133, |
|
"eval_sequential_score": 0.6417015148414534, |
|
"eval_sts-test_pearson_cosine": 0.2853943019391156, |
|
"eval_sts-test_pearson_dot": 0.28526334639473966, |
|
"eval_sts-test_pearson_euclidean": 0.29405773952219494, |
|
"eval_sts-test_pearson_manhattan": 0.3110310476615048, |
|
"eval_sts-test_pearson_max": 0.3110310476615048, |
|
"eval_sts-test_spearman_cosine": 0.31414239162305135, |
|
"eval_sts-test_spearman_dot": 0.31380407209449446, |
|
"eval_sts-test_spearman_euclidean": 0.3141516551339523, |
|
"eval_sts-test_spearman_manhattan": 0.3366243060620438, |
|
"eval_sts-test_spearman_max": 0.3366243060620438, |
|
"eval_vitaminc-pairs_loss": 2.7439002990722656, |
|
"eval_vitaminc-pairs_runtime": 3.7639, |
|
"eval_vitaminc-pairs_samples_per_second": 34.007, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_negation-triplets_loss": 4.63640022277832, |
|
"eval_negation-triplets_runtime": 0.7072, |
|
"eval_negation-triplets_samples_per_second": 180.999, |
|
"eval_negation-triplets_steps_per_second": 1.414, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_scitail-pairs-pos_loss": 1.0088545083999634, |
|
"eval_scitail-pairs-pos_runtime": 0.8123, |
|
"eval_scitail-pairs-pos_samples_per_second": 157.577, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.231, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_scitail-pairs-qa_loss": 1.1228678226470947, |
|
"eval_scitail-pairs-qa_runtime": 0.5444, |
|
"eval_scitail-pairs-qa_samples_per_second": 235.115, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.837, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_xsum-pairs_loss": 5.4869818687438965, |
|
"eval_xsum-pairs_runtime": 2.8888, |
|
"eval_xsum-pairs_samples_per_second": 44.308, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_sciq_pairs_loss": 0.628353476524353, |
|
"eval_sciq_pairs_runtime": 3.8061, |
|
"eval_sciq_pairs_samples_per_second": 33.631, |
|
"eval_sciq_pairs_steps_per_second": 0.263, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_qasc_pairs_loss": 2.593322277069092, |
|
"eval_qasc_pairs_runtime": 0.6728, |
|
"eval_qasc_pairs_samples_per_second": 190.241, |
|
"eval_qasc_pairs_steps_per_second": 1.486, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_openbookqa_pairs_loss": 4.394308090209961, |
|
"eval_openbookqa_pairs_runtime": 0.5852, |
|
"eval_openbookqa_pairs_samples_per_second": 218.729, |
|
"eval_openbookqa_pairs_steps_per_second": 1.709, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_msmarco_pairs_loss": 5.656517505645752, |
|
"eval_msmarco_pairs_runtime": 1.2571, |
|
"eval_msmarco_pairs_samples_per_second": 101.822, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_nq_pairs_loss": 5.986983776092529, |
|
"eval_nq_pairs_runtime": 2.5075, |
|
"eval_nq_pairs_samples_per_second": 51.047, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_trivia_pairs_loss": 5.694415092468262, |
|
"eval_trivia_pairs_runtime": 3.6302, |
|
"eval_trivia_pairs_samples_per_second": 35.26, |
|
"eval_trivia_pairs_steps_per_second": 0.275, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_gooaq_pairs_loss": 5.3856658935546875, |
|
"eval_gooaq_pairs_runtime": 0.9618, |
|
"eval_gooaq_pairs_samples_per_second": 133.082, |
|
"eval_gooaq_pairs_steps_per_second": 1.04, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_paws-pos_loss": 0.3622308671474457, |
|
"eval_paws-pos_runtime": 0.6678, |
|
"eval_paws-pos_samples_per_second": 191.674, |
|
"eval_paws-pos_steps_per_second": 1.497, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1415929203539823, |
|
"eval_global_dataset_loss": 3.401135206222534, |
|
"eval_global_dataset_runtime": 23.0422, |
|
"eval_global_dataset_samples_per_second": 28.773, |
|
"eval_global_dataset_steps_per_second": 0.26, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.1424778761061947, |
|
"grad_norm": 15.203566551208496, |
|
"learning_rate": 5.554245283018868e-07, |
|
"loss": 3.4324, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.1433628318584071, |
|
"grad_norm": 17.9180850982666, |
|
"learning_rate": 5.589622641509434e-07, |
|
"loss": 3.6658, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.14424778761061946, |
|
"grad_norm": 15.438867568969727, |
|
"learning_rate": 5.625e-07, |
|
"loss": 3.96, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.14513274336283186, |
|
"grad_norm": 11.754356384277344, |
|
"learning_rate": 5.660377358490567e-07, |
|
"loss": 2.3167, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.14601769911504425, |
|
"grad_norm": 14.934159278869629, |
|
"learning_rate": 5.695754716981132e-07, |
|
"loss": 3.6345, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.14690265486725665, |
|
"grad_norm": 12.897971153259277, |
|
"learning_rate": 5.731132075471699e-07, |
|
"loss": 2.462, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.14778761061946902, |
|
"grad_norm": 9.75023365020752, |
|
"learning_rate": 5.766509433962264e-07, |
|
"loss": 1.4742, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.1486725663716814, |
|
"grad_norm": 19.08034324645996, |
|
"learning_rate": 5.80188679245283e-07, |
|
"loss": 4.7312, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.1495575221238938, |
|
"grad_norm": 11.19345760345459, |
|
"learning_rate": 5.837264150943396e-07, |
|
"loss": 2.6785, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.1504424778761062, |
|
"grad_norm": 15.673795700073242, |
|
"learning_rate": 5.872641509433962e-07, |
|
"loss": 3.449, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.15132743362831858, |
|
"grad_norm": 13.0908842086792, |
|
"learning_rate": 5.908018867924529e-07, |
|
"loss": 2.437, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.15221238938053097, |
|
"grad_norm": 18.692623138427734, |
|
"learning_rate": 5.943396226415095e-07, |
|
"loss": 4.2431, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.15309734513274337, |
|
"grad_norm": 18.569456100463867, |
|
"learning_rate": 5.978773584905661e-07, |
|
"loss": 4.4848, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.15398230088495576, |
|
"grad_norm": 13.657751083374023, |
|
"learning_rate": 6.014150943396227e-07, |
|
"loss": 2.5575, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.15486725663716813, |
|
"grad_norm": 13.810693740844727, |
|
"learning_rate": 6.049528301886793e-07, |
|
"loss": 2.3798, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.15575221238938053, |
|
"grad_norm": 19.44959831237793, |
|
"learning_rate": 6.084905660377359e-07, |
|
"loss": 4.4939, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.15663716814159293, |
|
"grad_norm": 18.276161193847656, |
|
"learning_rate": 6.120283018867924e-07, |
|
"loss": 4.1285, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.15752212389380532, |
|
"grad_norm": 12.755691528320312, |
|
"learning_rate": 6.155660377358491e-07, |
|
"loss": 3.0096, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.1584070796460177, |
|
"grad_norm": 17.114940643310547, |
|
"learning_rate": 6.191037735849056e-07, |
|
"loss": 4.4431, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.1592920353982301, |
|
"grad_norm": 16.182861328125, |
|
"learning_rate": 6.226415094339623e-07, |
|
"loss": 3.1172, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.16017699115044248, |
|
"grad_norm": 11.372871398925781, |
|
"learning_rate": 6.261792452830188e-07, |
|
"loss": 2.3576, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.16106194690265488, |
|
"grad_norm": 15.957989692687988, |
|
"learning_rate": 6.297169811320755e-07, |
|
"loss": 3.7849, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.16194690265486725, |
|
"grad_norm": 16.545942306518555, |
|
"learning_rate": 6.332547169811321e-07, |
|
"loss": 3.679, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.16283185840707964, |
|
"grad_norm": 12.20029354095459, |
|
"learning_rate": 6.367924528301888e-07, |
|
"loss": 3.1949, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.16371681415929204, |
|
"grad_norm": 12.985442161560059, |
|
"learning_rate": 6.403301886792453e-07, |
|
"loss": 3.2422, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.16460176991150444, |
|
"grad_norm": 16.652189254760742, |
|
"learning_rate": 6.43867924528302e-07, |
|
"loss": 2.9905, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.1654867256637168, |
|
"grad_norm": 13.374595642089844, |
|
"learning_rate": 6.474056603773585e-07, |
|
"loss": 2.2697, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.1663716814159292, |
|
"grad_norm": 13.449297904968262, |
|
"learning_rate": 6.509433962264152e-07, |
|
"loss": 1.7685, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.1672566371681416, |
|
"grad_norm": 11.019112586975098, |
|
"learning_rate": 6.544811320754717e-07, |
|
"loss": 2.0971, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.168141592920354, |
|
"grad_norm": 14.534268379211426, |
|
"learning_rate": 6.580188679245283e-07, |
|
"loss": 3.4689, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.16902654867256636, |
|
"grad_norm": 10.543577194213867, |
|
"learning_rate": 6.615566037735849e-07, |
|
"loss": 1.6614, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.16991150442477876, |
|
"grad_norm": 11.60018539428711, |
|
"learning_rate": 6.650943396226415e-07, |
|
"loss": 1.9574, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.17079646017699116, |
|
"grad_norm": 12.020721435546875, |
|
"learning_rate": 6.686320754716981e-07, |
|
"loss": 1.9313, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.17168141592920355, |
|
"grad_norm": 13.000722885131836, |
|
"learning_rate": 6.721698113207547e-07, |
|
"loss": 2.2316, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.17256637168141592, |
|
"grad_norm": 14.97183609008789, |
|
"learning_rate": 6.757075471698113e-07, |
|
"loss": 1.9854, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.17345132743362832, |
|
"grad_norm": 13.239171028137207, |
|
"learning_rate": 6.792452830188679e-07, |
|
"loss": 2.8428, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.1743362831858407, |
|
"grad_norm": 14.02790355682373, |
|
"learning_rate": 6.827830188679246e-07, |
|
"loss": 2.6916, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.1752212389380531, |
|
"grad_norm": 19.518878936767578, |
|
"learning_rate": 6.863207547169812e-07, |
|
"loss": 3.5193, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.17610619469026548, |
|
"grad_norm": 13.07220458984375, |
|
"learning_rate": 6.898584905660377e-07, |
|
"loss": 3.1681, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"grad_norm": 17.351106643676758, |
|
"learning_rate": 6.933962264150944e-07, |
|
"loss": 2.7377, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_Qnli-dev_cosine_accuracy": 0.654296875, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8833130598068237, |
|
"eval_Qnli-dev_cosine_ap": 0.6580270250237132, |
|
"eval_Qnli-dev_cosine_f1": 0.6530014641288434, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.7697122097015381, |
|
"eval_Qnli-dev_cosine_precision": 0.4988814317673378, |
|
"eval_Qnli-dev_cosine_recall": 0.9449152542372882, |
|
"eval_Qnli-dev_dot_accuracy": 0.654296875, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 678.5490112304688, |
|
"eval_Qnli-dev_dot_ap": 0.6575507232408317, |
|
"eval_Qnli-dev_dot_f1": 0.6530014641288434, |
|
"eval_Qnli-dev_dot_f1_threshold": 591.318115234375, |
|
"eval_Qnli-dev_dot_precision": 0.4988814317673378, |
|
"eval_Qnli-dev_dot_recall": 0.9449152542372882, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.654296875, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 13.389348983764648, |
|
"eval_Qnli-dev_euclidean_ap": 0.6580673873885579, |
|
"eval_Qnli-dev_euclidean_f1": 0.6530014641288434, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 18.810344696044922, |
|
"eval_Qnli-dev_euclidean_precision": 0.4988814317673378, |
|
"eval_Qnli-dev_euclidean_recall": 0.9449152542372882, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.654296875, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 261.8469543457031, |
|
"eval_Qnli-dev_manhattan_ap": 0.6702231337150942, |
|
"eval_Qnli-dev_manhattan_f1": 0.6515397082658022, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 327.61407470703125, |
|
"eval_Qnli-dev_manhattan_precision": 0.5275590551181102, |
|
"eval_Qnli-dev_manhattan_recall": 0.8516949152542372, |
|
"eval_Qnli-dev_max_accuracy": 0.654296875, |
|
"eval_Qnli-dev_max_accuracy_threshold": 678.5490112304688, |
|
"eval_Qnli-dev_max_ap": 0.6702231337150942, |
|
"eval_Qnli-dev_max_f1": 0.6530014641288434, |
|
"eval_Qnli-dev_max_f1_threshold": 591.318115234375, |
|
"eval_Qnli-dev_max_precision": 0.5275590551181102, |
|
"eval_Qnli-dev_max_recall": 0.9449152542372882, |
|
"eval_allNLI-dev_cosine_accuracy": 0.66796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9789057970046997, |
|
"eval_allNLI-dev_cosine_ap": 0.4149758651874867, |
|
"eval_allNLI-dev_cosine_f1": 0.5295404814004376, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8845139741897583, |
|
"eval_allNLI-dev_cosine_precision": 0.426056338028169, |
|
"eval_allNLI-dev_cosine_recall": 0.6994219653179191, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 752.1302490234375, |
|
"eval_allNLI-dev_dot_ap": 0.4148843825453817, |
|
"eval_allNLI-dev_dot_f1": 0.5295404814004376, |
|
"eval_allNLI-dev_dot_f1_threshold": 679.59765625, |
|
"eval_allNLI-dev_dot_precision": 0.426056338028169, |
|
"eval_allNLI-dev_dot_recall": 0.6994219653179191, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.66796875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 5.692765712738037, |
|
"eval_allNLI-dev_euclidean_ap": 0.4149715623944703, |
|
"eval_allNLI-dev_euclidean_f1": 0.5295404814004376, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 13.321504592895508, |
|
"eval_allNLI-dev_euclidean_precision": 0.426056338028169, |
|
"eval_allNLI-dev_euclidean_recall": 0.6994219653179191, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.66796875, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 108.60708618164062, |
|
"eval_allNLI-dev_manhattan_ap": 0.4226938762919136, |
|
"eval_allNLI-dev_manhattan_f1": 0.5261261261261262, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 292.82879638671875, |
|
"eval_allNLI-dev_manhattan_precision": 0.38219895287958117, |
|
"eval_allNLI-dev_manhattan_recall": 0.8439306358381503, |
|
"eval_allNLI-dev_max_accuracy": 0.66796875, |
|
"eval_allNLI-dev_max_accuracy_threshold": 752.1302490234375, |
|
"eval_allNLI-dev_max_ap": 0.4226938762919136, |
|
"eval_allNLI-dev_max_f1": 0.5295404814004376, |
|
"eval_allNLI-dev_max_f1_threshold": 679.59765625, |
|
"eval_allNLI-dev_max_precision": 0.426056338028169, |
|
"eval_allNLI-dev_max_recall": 0.8439306358381503, |
|
"eval_sequential_score": 0.6702231337150942, |
|
"eval_sts-test_pearson_cosine": 0.2940990176248945, |
|
"eval_sts-test_pearson_dot": 0.2937915867258836, |
|
"eval_sts-test_pearson_euclidean": 0.3181750373778839, |
|
"eval_sts-test_pearson_manhattan": 0.3345827566037396, |
|
"eval_sts-test_pearson_max": 0.3345827566037396, |
|
"eval_sts-test_spearman_cosine": 0.3520659969675645, |
|
"eval_sts-test_spearman_dot": 0.3519276260704351, |
|
"eval_sts-test_spearman_euclidean": 0.35206747375915737, |
|
"eval_sts-test_spearman_manhattan": 0.3786100728147803, |
|
"eval_sts-test_spearman_max": 0.3786100728147803, |
|
"eval_vitaminc-pairs_loss": 2.7042157649993896, |
|
"eval_vitaminc-pairs_runtime": 3.7507, |
|
"eval_vitaminc-pairs_samples_per_second": 34.127, |
|
"eval_vitaminc-pairs_steps_per_second": 0.267, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_negation-triplets_loss": 4.873472213745117, |
|
"eval_negation-triplets_runtime": 0.7086, |
|
"eval_negation-triplets_samples_per_second": 180.65, |
|
"eval_negation-triplets_steps_per_second": 1.411, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_scitail-pairs-pos_loss": 0.6427631974220276, |
|
"eval_scitail-pairs-pos_runtime": 0.8081, |
|
"eval_scitail-pairs-pos_samples_per_second": 158.403, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.238, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_scitail-pairs-qa_loss": 0.6247898936271667, |
|
"eval_scitail-pairs-qa_runtime": 0.5409, |
|
"eval_scitail-pairs-qa_samples_per_second": 236.639, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.849, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_xsum-pairs_loss": 4.36387300491333, |
|
"eval_xsum-pairs_runtime": 2.8937, |
|
"eval_xsum-pairs_samples_per_second": 44.234, |
|
"eval_xsum-pairs_steps_per_second": 0.346, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_sciq_pairs_loss": 0.47759732604026794, |
|
"eval_sciq_pairs_runtime": 3.7844, |
|
"eval_sciq_pairs_samples_per_second": 33.823, |
|
"eval_sciq_pairs_steps_per_second": 0.264, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_qasc_pairs_loss": 1.8949910402297974, |
|
"eval_qasc_pairs_runtime": 0.6749, |
|
"eval_qasc_pairs_samples_per_second": 189.661, |
|
"eval_qasc_pairs_steps_per_second": 1.482, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_openbookqa_pairs_loss": 3.398245334625244, |
|
"eval_openbookqa_pairs_runtime": 0.5848, |
|
"eval_openbookqa_pairs_samples_per_second": 218.88, |
|
"eval_openbookqa_pairs_steps_per_second": 1.71, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_msmarco_pairs_loss": 4.104819297790527, |
|
"eval_msmarco_pairs_runtime": 1.2633, |
|
"eval_msmarco_pairs_samples_per_second": 101.324, |
|
"eval_msmarco_pairs_steps_per_second": 0.792, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_nq_pairs_loss": 4.759064197540283, |
|
"eval_nq_pairs_runtime": 2.5126, |
|
"eval_nq_pairs_samples_per_second": 50.943, |
|
"eval_nq_pairs_steps_per_second": 0.398, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_trivia_pairs_loss": 4.4567718505859375, |
|
"eval_trivia_pairs_runtime": 3.6274, |
|
"eval_trivia_pairs_samples_per_second": 35.287, |
|
"eval_trivia_pairs_steps_per_second": 0.276, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_gooaq_pairs_loss": 4.16129207611084, |
|
"eval_gooaq_pairs_runtime": 0.9594, |
|
"eval_gooaq_pairs_samples_per_second": 133.413, |
|
"eval_gooaq_pairs_steps_per_second": 1.042, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_paws-pos_loss": 0.18016286194324493, |
|
"eval_paws-pos_runtime": 0.6707, |
|
"eval_paws-pos_samples_per_second": 190.836, |
|
"eval_paws-pos_steps_per_second": 1.491, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17699115044247787, |
|
"eval_global_dataset_loss": 2.4958598613739014, |
|
"eval_global_dataset_runtime": 23.0263, |
|
"eval_global_dataset_samples_per_second": 28.793, |
|
"eval_global_dataset_steps_per_second": 0.261, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.17787610619469027, |
|
"grad_norm": 11.437249183654785, |
|
"learning_rate": 6.969339622641509e-07, |
|
"loss": 1.6408, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.17876106194690267, |
|
"grad_norm": 11.460991859436035, |
|
"learning_rate": 7.004716981132076e-07, |
|
"loss": 2.3864, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.17964601769911503, |
|
"grad_norm": 11.440958976745605, |
|
"learning_rate": 7.040094339622641e-07, |
|
"loss": 2.0848, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.18053097345132743, |
|
"grad_norm": 14.584334373474121, |
|
"learning_rate": 7.075471698113208e-07, |
|
"loss": 2.9074, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.18141592920353983, |
|
"grad_norm": 13.474750518798828, |
|
"learning_rate": 7.110849056603773e-07, |
|
"loss": 2.542, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.18230088495575222, |
|
"grad_norm": 11.853781700134277, |
|
"learning_rate": 7.14622641509434e-07, |
|
"loss": 1.7312, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.1831858407079646, |
|
"grad_norm": 12.289971351623535, |
|
"learning_rate": 7.181603773584905e-07, |
|
"loss": 1.6768, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.184070796460177, |
|
"grad_norm": 16.296539306640625, |
|
"learning_rate": 7.216981132075472e-07, |
|
"loss": 2.531, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.18495575221238938, |
|
"grad_norm": 17.131324768066406, |
|
"learning_rate": 7.252358490566038e-07, |
|
"loss": 2.9222, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.18584070796460178, |
|
"grad_norm": 12.739492416381836, |
|
"learning_rate": 7.287735849056604e-07, |
|
"loss": 2.4152, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.18672566371681415, |
|
"grad_norm": 10.511398315429688, |
|
"learning_rate": 7.32311320754717e-07, |
|
"loss": 1.4345, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.18761061946902655, |
|
"grad_norm": 12.478878021240234, |
|
"learning_rate": 7.358490566037736e-07, |
|
"loss": 1.5864, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.18849557522123894, |
|
"grad_norm": 10.100104331970215, |
|
"learning_rate": 7.393867924528302e-07, |
|
"loss": 1.272, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.18938053097345134, |
|
"grad_norm": 10.771380424499512, |
|
"learning_rate": 7.429245283018868e-07, |
|
"loss": 1.7011, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.1902654867256637, |
|
"grad_norm": 15.291692733764648, |
|
"learning_rate": 7.464622641509434e-07, |
|
"loss": 3.0076, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.1911504424778761, |
|
"grad_norm": 15.702594757080078, |
|
"learning_rate": 7.5e-07, |
|
"loss": 2.468, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.1920353982300885, |
|
"grad_norm": 14.604419708251953, |
|
"learning_rate": 7.535377358490567e-07, |
|
"loss": 2.0796, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.1929203539823009, |
|
"grad_norm": 18.808080673217773, |
|
"learning_rate": 7.570754716981133e-07, |
|
"loss": 2.9735, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.19380530973451326, |
|
"grad_norm": 15.642558097839355, |
|
"learning_rate": 7.606132075471698e-07, |
|
"loss": 2.5506, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.19469026548672566, |
|
"grad_norm": 11.20150089263916, |
|
"learning_rate": 7.641509433962264e-07, |
|
"loss": 1.7307, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.19557522123893806, |
|
"grad_norm": 11.246394157409668, |
|
"learning_rate": 7.67688679245283e-07, |
|
"loss": 1.4519, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.19646017699115045, |
|
"grad_norm": 11.744650840759277, |
|
"learning_rate": 7.712264150943397e-07, |
|
"loss": 1.7292, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.19734513274336282, |
|
"grad_norm": 11.411338806152344, |
|
"learning_rate": 7.747641509433962e-07, |
|
"loss": 1.4664, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.19823008849557522, |
|
"grad_norm": 12.0331392288208, |
|
"learning_rate": 7.783018867924529e-07, |
|
"loss": 1.6201, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.19911504424778761, |
|
"grad_norm": 15.52979564666748, |
|
"learning_rate": 7.818396226415095e-07, |
|
"loss": 2.3483, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 12.872408866882324, |
|
"learning_rate": 7.853773584905662e-07, |
|
"loss": 2.1311, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.20088495575221238, |
|
"grad_norm": 15.520133018493652, |
|
"learning_rate": 7.889150943396226e-07, |
|
"loss": 2.3272, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.20176991150442478, |
|
"grad_norm": 14.214468955993652, |
|
"learning_rate": 7.924528301886793e-07, |
|
"loss": 2.6164, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.20265486725663717, |
|
"grad_norm": 11.147010803222656, |
|
"learning_rate": 7.959905660377359e-07, |
|
"loss": 1.6261, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.20353982300884957, |
|
"grad_norm": 14.623574256896973, |
|
"learning_rate": 7.995283018867926e-07, |
|
"loss": 2.5293, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.20442477876106194, |
|
"grad_norm": 11.993956565856934, |
|
"learning_rate": 8.03066037735849e-07, |
|
"loss": 1.2885, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.20530973451327433, |
|
"grad_norm": 17.700708389282227, |
|
"learning_rate": 8.066037735849056e-07, |
|
"loss": 2.0039, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.20619469026548673, |
|
"grad_norm": 19.518352508544922, |
|
"learning_rate": 8.101415094339623e-07, |
|
"loss": 3.0003, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.20707964601769913, |
|
"grad_norm": 12.744159698486328, |
|
"learning_rate": 8.136792452830189e-07, |
|
"loss": 2.0491, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.2079646017699115, |
|
"grad_norm": 11.839001655578613, |
|
"learning_rate": 8.172169811320755e-07, |
|
"loss": 2.0178, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2088495575221239, |
|
"grad_norm": 15.795985221862793, |
|
"learning_rate": 8.207547169811321e-07, |
|
"loss": 1.8532, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.2097345132743363, |
|
"grad_norm": 14.999555587768555, |
|
"learning_rate": 8.242924528301888e-07, |
|
"loss": 2.3614, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.21061946902654868, |
|
"grad_norm": 10.802149772644043, |
|
"learning_rate": 8.278301886792453e-07, |
|
"loss": 1.1889, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.21150442477876105, |
|
"grad_norm": 12.038461685180664, |
|
"learning_rate": 8.313679245283019e-07, |
|
"loss": 1.4833, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"grad_norm": 17.69329071044922, |
|
"learning_rate": 8.349056603773585e-07, |
|
"loss": 2.8687, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_Qnli-dev_cosine_accuracy": 0.66015625, |
|
"eval_Qnli-dev_cosine_accuracy_threshold": 0.8744948506355286, |
|
"eval_Qnli-dev_cosine_ap": 0.6610633478265061, |
|
"eval_Qnli-dev_cosine_f1": 0.6646433990895295, |
|
"eval_Qnli-dev_cosine_f1_threshold": 0.753309965133667, |
|
"eval_Qnli-dev_cosine_precision": 0.5177304964539007, |
|
"eval_Qnli-dev_cosine_recall": 0.9279661016949152, |
|
"eval_Qnli-dev_dot_accuracy": 0.66015625, |
|
"eval_Qnli-dev_dot_accuracy_threshold": 670.719970703125, |
|
"eval_Qnli-dev_dot_ap": 0.6607472505349153, |
|
"eval_Qnli-dev_dot_f1": 0.6646433990895295, |
|
"eval_Qnli-dev_dot_f1_threshold": 578.874755859375, |
|
"eval_Qnli-dev_dot_precision": 0.5177304964539007, |
|
"eval_Qnli-dev_dot_recall": 0.9279661016949152, |
|
"eval_Qnli-dev_euclidean_accuracy": 0.66015625, |
|
"eval_Qnli-dev_euclidean_accuracy_threshold": 13.881525039672852, |
|
"eval_Qnli-dev_euclidean_ap": 0.6611053426809266, |
|
"eval_Qnli-dev_euclidean_f1": 0.6646433990895295, |
|
"eval_Qnli-dev_euclidean_f1_threshold": 19.471359252929688, |
|
"eval_Qnli-dev_euclidean_precision": 0.5177304964539007, |
|
"eval_Qnli-dev_euclidean_recall": 0.9279661016949152, |
|
"eval_Qnli-dev_manhattan_accuracy": 0.666015625, |
|
"eval_Qnli-dev_manhattan_accuracy_threshold": 281.9825134277344, |
|
"eval_Qnli-dev_manhattan_ap": 0.6664006509577655, |
|
"eval_Qnli-dev_manhattan_f1": 0.6678899082568808, |
|
"eval_Qnli-dev_manhattan_f1_threshold": 328.83447265625, |
|
"eval_Qnli-dev_manhattan_precision": 0.5889967637540453, |
|
"eval_Qnli-dev_manhattan_recall": 0.7711864406779662, |
|
"eval_Qnli-dev_max_accuracy": 0.666015625, |
|
"eval_Qnli-dev_max_accuracy_threshold": 670.719970703125, |
|
"eval_Qnli-dev_max_ap": 0.6664006509577655, |
|
"eval_Qnli-dev_max_f1": 0.6678899082568808, |
|
"eval_Qnli-dev_max_f1_threshold": 578.874755859375, |
|
"eval_Qnli-dev_max_precision": 0.5889967637540453, |
|
"eval_Qnli-dev_max_recall": 0.9279661016949152, |
|
"eval_allNLI-dev_cosine_accuracy": 0.66796875, |
|
"eval_allNLI-dev_cosine_accuracy_threshold": 0.9727417230606079, |
|
"eval_allNLI-dev_cosine_ap": 0.4443750308487611, |
|
"eval_allNLI-dev_cosine_f1": 0.5338983050847458, |
|
"eval_allNLI-dev_cosine_f1_threshold": 0.8509687781333923, |
|
"eval_allNLI-dev_cosine_precision": 0.4214046822742475, |
|
"eval_allNLI-dev_cosine_recall": 0.7283236994219653, |
|
"eval_allNLI-dev_dot_accuracy": 0.66796875, |
|
"eval_allNLI-dev_dot_accuracy_threshold": 747.4664916992188, |
|
"eval_allNLI-dev_dot_ap": 0.4447331164315086, |
|
"eval_allNLI-dev_dot_f1": 0.5347368421052632, |
|
"eval_allNLI-dev_dot_f1_threshold": 652.6121826171875, |
|
"eval_allNLI-dev_dot_precision": 0.4205298013245033, |
|
"eval_allNLI-dev_dot_recall": 0.7341040462427746, |
|
"eval_allNLI-dev_euclidean_accuracy": 0.66796875, |
|
"eval_allNLI-dev_euclidean_accuracy_threshold": 6.472302436828613, |
|
"eval_allNLI-dev_euclidean_ap": 0.44436910603457025, |
|
"eval_allNLI-dev_euclidean_f1": 0.5338983050847458, |
|
"eval_allNLI-dev_euclidean_f1_threshold": 15.134000778198242, |
|
"eval_allNLI-dev_euclidean_precision": 0.4214046822742475, |
|
"eval_allNLI-dev_euclidean_recall": 0.7283236994219653, |
|
"eval_allNLI-dev_manhattan_accuracy": 0.673828125, |
|
"eval_allNLI-dev_manhattan_accuracy_threshold": 185.35494995117188, |
|
"eval_allNLI-dev_manhattan_ap": 0.45330636568192945, |
|
"eval_allNLI-dev_manhattan_f1": 0.5340909090909091, |
|
"eval_allNLI-dev_manhattan_f1_threshold": 316.48419189453125, |
|
"eval_allNLI-dev_manhattan_precision": 0.3971830985915493, |
|
"eval_allNLI-dev_manhattan_recall": 0.815028901734104, |
|
"eval_allNLI-dev_max_accuracy": 0.673828125, |
|
"eval_allNLI-dev_max_accuracy_threshold": 747.4664916992188, |
|
"eval_allNLI-dev_max_ap": 0.45330636568192945, |
|
"eval_allNLI-dev_max_f1": 0.5347368421052632, |
|
"eval_allNLI-dev_max_f1_threshold": 652.6121826171875, |
|
"eval_allNLI-dev_max_precision": 0.4214046822742475, |
|
"eval_allNLI-dev_max_recall": 0.815028901734104, |
|
"eval_sequential_score": 0.6664006509577655, |
|
"eval_sts-test_pearson_cosine": 0.3977846210139704, |
|
"eval_sts-test_pearson_dot": 0.3974381713503513, |
|
"eval_sts-test_pearson_euclidean": 0.42060129087924125, |
|
"eval_sts-test_pearson_manhattan": 0.43174431600737306, |
|
"eval_sts-test_pearson_max": 0.43174431600737306, |
|
"eval_sts-test_spearman_cosine": 0.44299644096637864, |
|
"eval_sts-test_spearman_dot": 0.4426330607320026, |
|
"eval_sts-test_spearman_euclidean": 0.44300328790921845, |
|
"eval_sts-test_spearman_manhattan": 0.4553695033739603, |
|
"eval_sts-test_spearman_max": 0.4553695033739603, |
|
"eval_vitaminc-pairs_loss": 2.721532106399536, |
|
"eval_vitaminc-pairs_runtime": 3.756, |
|
"eval_vitaminc-pairs_samples_per_second": 34.079, |
|
"eval_vitaminc-pairs_steps_per_second": 0.266, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_negation-triplets_loss": 4.154361248016357, |
|
"eval_negation-triplets_runtime": 0.7084, |
|
"eval_negation-triplets_samples_per_second": 180.694, |
|
"eval_negation-triplets_steps_per_second": 1.412, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_scitail-pairs-pos_loss": 0.4166124761104584, |
|
"eval_scitail-pairs-pos_runtime": 0.8374, |
|
"eval_scitail-pairs-pos_samples_per_second": 152.85, |
|
"eval_scitail-pairs-pos_steps_per_second": 1.194, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_scitail-pairs-qa_loss": 0.3875552713871002, |
|
"eval_scitail-pairs-qa_runtime": 0.5637, |
|
"eval_scitail-pairs-qa_samples_per_second": 227.065, |
|
"eval_scitail-pairs-qa_steps_per_second": 1.774, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_xsum-pairs_loss": 3.315666437149048, |
|
"eval_xsum-pairs_runtime": 2.9088, |
|
"eval_xsum-pairs_samples_per_second": 44.004, |
|
"eval_xsum-pairs_steps_per_second": 0.344, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_sciq_pairs_loss": 0.37107861042022705, |
|
"eval_sciq_pairs_runtime": 3.7824, |
|
"eval_sciq_pairs_samples_per_second": 33.841, |
|
"eval_sciq_pairs_steps_per_second": 0.264, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_qasc_pairs_loss": 1.4818006753921509, |
|
"eval_qasc_pairs_runtime": 0.6698, |
|
"eval_qasc_pairs_samples_per_second": 191.109, |
|
"eval_qasc_pairs_steps_per_second": 1.493, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_openbookqa_pairs_loss": 2.693885564804077, |
|
"eval_openbookqa_pairs_runtime": 0.5747, |
|
"eval_openbookqa_pairs_samples_per_second": 222.725, |
|
"eval_openbookqa_pairs_steps_per_second": 1.74, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_msmarco_pairs_loss": 3.245403528213501, |
|
"eval_msmarco_pairs_runtime": 1.2573, |
|
"eval_msmarco_pairs_samples_per_second": 101.804, |
|
"eval_msmarco_pairs_steps_per_second": 0.795, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_nq_pairs_loss": 3.979779005050659, |
|
"eval_nq_pairs_runtime": 2.5086, |
|
"eval_nq_pairs_samples_per_second": 51.025, |
|
"eval_nq_pairs_steps_per_second": 0.399, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_trivia_pairs_loss": 3.594862937927246, |
|
"eval_trivia_pairs_runtime": 3.6534, |
|
"eval_trivia_pairs_samples_per_second": 35.036, |
|
"eval_trivia_pairs_steps_per_second": 0.274, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_gooaq_pairs_loss": 3.226611852645874, |
|
"eval_gooaq_pairs_runtime": 1.0202, |
|
"eval_gooaq_pairs_samples_per_second": 125.465, |
|
"eval_gooaq_pairs_steps_per_second": 0.98, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_paws-pos_loss": 0.12748712301254272, |
|
"eval_paws-pos_runtime": 0.6844, |
|
"eval_paws-pos_samples_per_second": 187.034, |
|
"eval_paws-pos_steps_per_second": 1.461, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.21238938053097345, |
|
"eval_global_dataset_loss": 1.8867173194885254, |
|
"eval_global_dataset_runtime": 23.0217, |
|
"eval_global_dataset_samples_per_second": 28.799, |
|
"eval_global_dataset_steps_per_second": 0.261, |
|
"step": 240 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 3390, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 80, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 0.0, |
|
"train_batch_size": 42, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|