asahi417 committed on
Commit 4ab8b81
1 Parent(s): f509c6b
eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_itquad.default.lmqg_mt5-small-itquad-ae.json ADDED
@@ -0,0 +1 @@
+ {"test": {"QAAlignedF1Score (BERTScore)": 0.816309736806609, "QAAlignedRecall (BERTScore)": 0.8227587281249125, "QAAlignedPrecision (BERTScore)": 0.8104451495421456, "QAAlignedF1Score (MoverScore)": 0.5584714375008074, "QAAlignedRecall (MoverScore)": 0.5614174450092285, "QAAlignedPrecision (MoverScore)": 0.5559663056330884, "Bleu_1": 0.23769265041451257, "Bleu_2": 0.13254641010425042, "Bleu_3": 0.0648088291470028, "Bleu_4": 0.034816186122057016, "METEOR": 0.23945736739431706, "ROUGE_L": 0.23306500682635067, "BERTScore": 0.772340482509352, "MoverScore": 0.546605905880354}, "validation": {"QAAlignedF1Score (BERTScore)": 0.806215693009629, "QAAlignedRecall (BERTScore)": 0.8316784927119242, "QAAlignedPrecision (BERTScore)": 0.7829386516950502, "QAAlignedF1Score (MoverScore)": 0.5510756937124439, "QAAlignedRecall (MoverScore)": 0.5682362728710998, "QAAlignedPrecision (MoverScore)": 0.5358292039030634, "Bleu_1": 0.10317621096656643, "Bleu_2": 0.04377229616025825, "Bleu_3": 0.019738249887163023, "Bleu_4": 0.010054336297258994, "METEOR": 0.20045587838927703, "ROUGE_L": 0.1455599293738737, "BERTScore": 0.6949032224477476, "MoverScore": 0.5166575484225808}}
eval_pipeline/samples.test.hyp.paragraph.questions_answers.lmqg_qg_itquad.default.lmqg_mt5-small-itquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff
 
eval_pipeline/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_itquad.default.lmqg_mt5-small-itquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff