{
  "best_metric": 85.9274,
  "best_model_checkpoint": "./output/checkpoint-1600",
  "epoch": 1.463719512195122,
  "global_step": 1600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 4.9496336996337e-05,
      "loss": 0.7035,
      "step": 25
    },
    {
      "epoch": 0.05,
      "learning_rate": 4.8923992673992674e-05,
      "loss": 0.3973,
      "step": 50
    },
    {
      "epoch": 0.07,
      "learning_rate": 4.8351648351648355e-05,
      "loss": 0.3092,
      "step": 75
    },
    {
      "epoch": 0.09,
      "learning_rate": 4.7779304029304036e-05,
      "loss": 0.2813,
      "step": 100
    },
    {
      "epoch": 0.11,
      "learning_rate": 4.720695970695971e-05,
      "loss": 0.2603,
      "step": 125
    },
    {
      "epoch": 0.14,
      "learning_rate": 4.6634615384615384e-05,
      "loss": 0.2371,
      "step": 150
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.606227106227106e-05,
      "loss": 0.2334,
      "step": 175
    },
    {
      "epoch": 0.18,
      "learning_rate": 4.548992673992674e-05,
      "loss": 0.2296,
      "step": 200
    },
    {
      "epoch": 0.21,
      "learning_rate": 4.491758241758242e-05,
      "loss": 0.2219,
      "step": 225
    },
    {
      "epoch": 0.23,
      "learning_rate": 4.4345238095238095e-05,
      "loss": 0.2078,
      "step": 250
    },
    {
      "epoch": 0.25,
      "learning_rate": 4.3772893772893776e-05,
      "loss": 0.2123,
      "step": 275
    },
    {
      "epoch": 0.27,
      "learning_rate": 4.320054945054946e-05,
      "loss": 0.2063,
      "step": 300
    },
    {
      "epoch": 0.3,
      "learning_rate": 4.262820512820513e-05,
      "loss": 0.2002,
      "step": 325
    },
    {
      "epoch": 0.32,
      "learning_rate": 4.2055860805860806e-05,
      "loss": 0.2102,
      "step": 350
    },
    {
      "epoch": 0.34,
      "learning_rate": 4.148351648351649e-05,
      "loss": 0.2003,
      "step": 375
    },
    {
      "epoch": 0.37,
      "learning_rate": 4.091117216117216e-05,
      "loss": 0.2019,
      "step": 400
    },
    {
      "epoch": 0.39,
      "learning_rate": 4.033882783882784e-05,
      "loss": 0.1944,
      "step": 425
    },
    {
      "epoch": 0.41,
      "learning_rate": 3.9766483516483516e-05,
      "loss": 0.1935,
      "step": 450
    },
    {
      "epoch": 0.43,
      "learning_rate": 3.91941391941392e-05,
      "loss": 0.1974,
      "step": 475
    },
    {
      "epoch": 0.46,
      "learning_rate": 3.862179487179488e-05,
      "loss": 0.1922,
      "step": 500
    },
    {
      "epoch": 0.48,
      "learning_rate": 3.804945054945055e-05,
      "loss": 0.188,
      "step": 525
    },
    {
      "epoch": 0.5,
      "learning_rate": 3.747710622710623e-05,
      "loss": 0.1945,
      "step": 550
    },
    {
      "epoch": 0.53,
      "learning_rate": 3.690476190476191e-05,
      "loss": 0.1897,
      "step": 575
    },
    {
      "epoch": 0.55,
      "learning_rate": 3.633241758241758e-05,
      "loss": 0.1797,
      "step": 600
    },
    {
      "epoch": 0.57,
      "learning_rate": 3.576007326007326e-05,
      "loss": 0.1752,
      "step": 625
    },
    {
      "epoch": 0.59,
      "learning_rate": 3.5187728937728944e-05,
      "loss": 0.1797,
      "step": 650
    },
    {
      "epoch": 0.62,
      "learning_rate": 3.461538461538462e-05,
      "loss": 0.1785,
      "step": 675
    },
    {
      "epoch": 0.64,
      "learning_rate": 3.404304029304029e-05,
      "loss": 0.185,
      "step": 700
    },
    {
      "epoch": 0.66,
      "learning_rate": 3.347069597069597e-05,
      "loss": 0.1751,
      "step": 725
    },
    {
      "epoch": 0.69,
      "learning_rate": 3.289835164835165e-05,
      "loss": 0.1799,
      "step": 750
    },
    {
      "epoch": 0.71,
      "learning_rate": 3.232600732600733e-05,
      "loss": 0.1801,
      "step": 775
    },
    {
      "epoch": 0.73,
      "learning_rate": 3.1753663003663003e-05,
      "loss": 0.1748,
      "step": 800
    },
    {
      "epoch": 0.73,
      "eval_Bleu_1": 0.8552,
      "eval_Bleu_2": 0.0009,
      "eval_Bleu_3": 0.0001,
      "eval_Bleu_4": 0.0,
      "eval_ROUGE_L": 0.8552,
      "eval_accuracy": 85.52,
      "eval_f1": 85.52,
      "eval_gen_len": 2.2084,
      "eval_loss": 0.17816422879695892,
      "eval_precision": 85.52,
      "eval_recall": 85.52,
      "eval_rouge1": 85.6587,
      "eval_rouge2": 3.7183,
      "eval_rougeL": 85.6528,
      "eval_rougeLsum": 85.6348,
      "eval_runtime": 1004.3102,
      "eval_samples_per_second": 34.705,
      "eval_steps_per_second": 4.338,
      "step": 800
    },
    {
      "epoch": 0.75,
      "learning_rate": 3.1181318681318685e-05,
      "loss": 0.1698,
      "step": 825
    },
    {
      "epoch": 0.78,
      "learning_rate": 3.0608974358974366e-05,
      "loss": 0.1763,
      "step": 850
    },
    {
      "epoch": 0.8,
      "learning_rate": 3.0036630036630036e-05,
      "loss": 0.1655,
      "step": 875
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.9464285714285718e-05,
      "loss": 0.1703,
      "step": 900
    },
    {
      "epoch": 0.85,
      "learning_rate": 2.8891941391941392e-05,
      "loss": 0.174,
      "step": 925
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.831959706959707e-05,
      "loss": 0.1723,
      "step": 950
    },
    {
      "epoch": 0.89,
      "learning_rate": 2.774725274725275e-05,
      "loss": 0.1739,
      "step": 975
    },
    {
      "epoch": 0.91,
      "learning_rate": 2.7174908424908425e-05,
      "loss": 0.1673,
      "step": 1000
    },
    {
      "epoch": 0.94,
      "learning_rate": 2.6602564102564102e-05,
      "loss": 0.1647,
      "step": 1025
    },
    {
      "epoch": 0.96,
      "learning_rate": 2.6030219780219783e-05,
      "loss": 0.1739,
      "step": 1050
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.5457875457875458e-05,
      "loss": 0.1703,
      "step": 1075
    },
    {
      "epoch": 1.01,
      "learning_rate": 2.488553113553114e-05,
      "loss": 0.1727,
      "step": 1100
    },
    {
      "epoch": 1.03,
      "learning_rate": 2.4313186813186813e-05,
      "loss": 0.1603,
      "step": 1125
    },
    {
      "epoch": 1.05,
      "learning_rate": 2.374084249084249e-05,
      "loss": 0.1511,
      "step": 1150
    },
    {
      "epoch": 1.07,
      "learning_rate": 2.3168498168498172e-05,
      "loss": 0.1544,
      "step": 1175
    },
    {
      "epoch": 1.1,
      "learning_rate": 2.2596153846153846e-05,
      "loss": 0.1517,
      "step": 1200
    },
    {
      "epoch": 1.12,
      "learning_rate": 2.2023809523809524e-05,
      "loss": 0.1556,
      "step": 1225
    },
    {
      "epoch": 1.14,
      "learning_rate": 2.14514652014652e-05,
      "loss": 0.1688,
      "step": 1250
    },
    {
      "epoch": 1.17,
      "learning_rate": 2.0879120879120882e-05,
      "loss": 0.1658,
      "step": 1275
    },
    {
      "epoch": 1.19,
      "learning_rate": 2.0306776556776557e-05,
      "loss": 0.1587,
      "step": 1300
    },
    {
      "epoch": 1.21,
      "learning_rate": 1.9734432234432234e-05,
      "loss": 0.1664,
      "step": 1325
    },
    {
      "epoch": 1.24,
      "learning_rate": 1.9162087912087915e-05,
      "loss": 0.1558,
      "step": 1350
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.858974358974359e-05,
      "loss": 0.1591,
      "step": 1375
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.8017399267399267e-05,
      "loss": 0.1558,
      "step": 1400
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.7445054945054945e-05,
      "loss": 0.1583,
      "step": 1425
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.6872710622710626e-05,
      "loss": 0.155,
      "step": 1450
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.63003663003663e-05,
      "loss": 0.1644,
      "step": 1475
    },
    {
      "epoch": 1.37,
      "learning_rate": 1.5728021978021978e-05,
      "loss": 0.1574,
      "step": 1500
    },
    {
      "epoch": 1.4,
      "learning_rate": 1.5155677655677656e-05,
      "loss": 0.1555,
      "step": 1525
    },
    {
      "epoch": 1.42,
      "learning_rate": 1.4583333333333335e-05,
      "loss": 0.1542,
      "step": 1550
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.4010989010989013e-05,
      "loss": 0.1537,
      "step": 1575
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.3438644688644688e-05,
      "loss": 0.1493,
      "step": 1600
    },
    {
      "epoch": 1.46,
      "eval_Bleu_1": 0.8593,
      "eval_Bleu_2": 0.0009,
      "eval_Bleu_3": 0.0001,
      "eval_Bleu_4": 0.0,
      "eval_ROUGE_L": 0.8593,
      "eval_accuracy": 85.9274,
      "eval_f1": 85.9274,
      "eval_gen_len": 2.2038,
      "eval_loss": 0.1681133359670639,
      "eval_precision": 85.9274,
      "eval_recall": 85.9274,
      "eval_rouge1": 86.0601,
      "eval_rouge2": 3.5662,
      "eval_rougeL": 86.0539,
      "eval_rougeLsum": 86.0496,
      "eval_runtime": 1001.4875,
      "eval_samples_per_second": 34.803,
      "eval_steps_per_second": 4.351,
      "step": 1600
    }
  ],
"max_steps": 2186, |
|
"num_train_epochs": 2, |
|
"total_flos": 2.7769320388994007e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|