diff_based_error_tagger / eval_results.json
{
"epoch": 30.0,
"eval_5_err_f1": 0.9850746268656716,
"eval_5_err_number": 34,
"eval_5_err_precision": 1.0,
"eval_5_err_recall": 0.9705882352941176,
"eval___f1": 0.9912903388209234,
"eval___number": 9934,
"eval___precision": 0.9915399335280491,
"eval___recall": 0.9910408697402859,
"eval__err_f1": 0.9808695652173912,
"eval__err_number": 285,
"eval__err_precision": 0.9724137931034482,
"eval__err_recall": 0.9894736842105263,
"eval_egin_err_f1": 0.9933598937583,
"eval_egin_err_number": 1126,
"eval_egin_err_precision": 0.9902912621359223,
"eval_egin_err_recall": 0.9964476021314387,
"eval_el_err_f1": 0.9956584659913169,
"eval_el_err_number": 1380,
"eval_el_err_precision": 0.9942196531791907,
"eval_el_err_recall": 0.9971014492753624,
"eval_loss": 0.0013192046899348497,
"eval_macro_avg_f1": 0.9912800394235617,
"eval_macro_avg_number": 23096,
"eval_macro_avg_precision": 0.991795835558829,
"eval_macro_avg_recall": 0.990841420701172,
"eval_micro_avg_f1": 0.993373754872239,
"eval_micro_avg_number": 23096,
"eval_micro_avg_precision": 0.9936319528677872,
"eval_micro_avg_recall": 0.9931156910287495,
"eval_nd_err_f1": 0.9919932574799832,
"eval_nd_err_number": 1188,
"eval_nd_err_precision": 0.9932489451476794,
"eval_nd_err_recall": 0.9907407407407407,
"eval_ne_word_err_f1": 0.9958722835983974,
"eval_ne_word_err_number": 8247,
"eval_ne_word_err_precision": 0.9970827762246263,
"eval_ne_word_err_recall": 0.9946647265672366,
"eval_overall_accuracy": 0.9993957202777317,
"eval_runtime": 11.4329,
"eval_samples": 18388,
"eval_samples_per_second": 1608.342,
"eval_steps_per_second": 100.587,
"eval_unc_insert_err_f1": 0.9961218836565098,
"eval_unc_insert_err_number": 902,
"eval_unc_insert_err_precision": 0.9955703211517165,
"eval_unc_insert_err_recall": 0.9966740576496674,
"eval_weighted_avg_f1": 0.993375440096453,
"eval_weighted_avg_number": 23096,
"eval_weighted_avg_precision": 0.9936401344515604,
"eval_weighted_avg_recall": 0.9931156910287495
}
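These appear to be per-label precision/recall/F1/support scores plus macro-, micro-, and weighted-average summaries from a token-classification evaluation. As a quick sanity check, each label's F1 should equal the harmonic mean of its stored precision and recall. The snippet below is a minimal sketch, assuming the JSON above is saved locally as eval_results.json; it recomputes F1 for every label key in the file and prints it next to the stored value.

```python
# Minimal sketch (assumption: the JSON above is saved as eval_results.json).
# Recomputes each label's F1 from its precision and recall and prints it
# alongside the value stored in the file.
import json

with open("eval_results.json") as f:
    metrics = json.load(f)

# Collect every label that has a precision/recall/f1 triple,
# e.g. "5_err", "egin_err", "macro_avg", as the keys appear in the file.
labels = sorted(
    k[len("eval_"):-len("_precision")]
    for k in metrics
    if k.startswith("eval_") and k.endswith("_precision")
)

for label in labels:
    p = metrics[f"eval_{label}_precision"]
    r = metrics[f"eval_{label}_recall"]
    f1 = metrics[f"eval_{label}_f1"]
    # Standard harmonic-mean definition of F1; guard against p + r == 0.
    recomputed = 2 * p * r / (p + r) if (p + r) else 0.0
    print(f"{label:>16}  support={metrics[f'eval_{label}_number']:>5}  "
          f"stored={f1:.6f}  recomputed={recomputed:.6f}")
```

For example, the "5_err" label has precision 1.0 and recall 0.9705882..., giving 2 * 1.0 * 0.9705882 / 1.9705882 ≈ 0.9850746, which matches the stored eval_5_err_f1.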