asahi417 committed on
Commit
f579b3e
1 Parent(s): 663c0cf
eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-small-ruquad-ae.json ADDED
@@ -0,0 +1 @@
+ {"test": {"QAAlignedF1Score (BERTScore)": 0.769564467481494, "QAAlignedRecall (BERTScore)": 0.8104800530876641, "QAAlignedPrecision (BERTScore)": 0.7340640614951169, "QAAlignedF1Score (MoverScore)": 0.5553017051853896, "QAAlignedRecall (MoverScore)": 0.5824871086285293, "QAAlignedPrecision (MoverScore)": 0.5323555019123399, "Bleu_1": 0.09809370668245954, "Bleu_2": 0.05766913203993452, "Bleu_3": 0.037025638577596005, "Bleu_4": 0.024344653026298183, "METEOR": 0.2126759057562774, "ROUGE_L": 0.14737291328906257, "BERTScore": 0.6617713167962964, "MoverScore": 0.5156516795697178}, "validation": {"QAAlignedF1Score (BERTScore)": 0.7686173869416876, "QAAlignedRecall (BERTScore)": 0.8090919205024955, "QAAlignedPrecision (BERTScore)": 0.7333954962274706, "QAAlignedF1Score (MoverScore)": 0.5546709275297588, "QAAlignedRecall (MoverScore)": 0.5812488517870633, "QAAlignedPrecision (MoverScore)": 0.532063361082291, "Bleu_1": 0.09614915764647886, "Bleu_2": 0.0562186425261996, "Bleu_3": 0.03628241966729175, "Bleu_4": 0.023843321692140835, "METEOR": 0.21131256805035895, "ROUGE_L": 0.1465141070796531, "BERTScore": 0.6604539006254374, "MoverScore": 0.5152313566891569}}
eval_pipeline/samples.test.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-small-ruquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff
 
eval_pipeline/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-small-ruquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff