asahi417 committed on
Commit
da65d48
1 Parent(s): 0439709
eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_dequad.default.lmqg_mt5-base-dequad-ae.json ADDED
@@ -0,0 +1 @@
+ {"test": {"QAAlignedF1Score (BERTScore)": 0.768603313809328, "QAAlignedRecall (BERTScore)": 0.7754904506314969, "QAAlignedPrecision (BERTScore)": 0.7627820593320781, "QAAlignedF1Score (MoverScore)": 0.5296466870139147, "QAAlignedRecall (MoverScore)": 0.530601049066982, "QAAlignedPrecision (MoverScore)": 0.529288998661448, "Bleu_1": 0.13465855425310752, "Bleu_2": 0.06318621771229568, "Bleu_3": 0.02025347148205866, "Bleu_4": 0.008659164185655276, "METEOR": 0.19573547861302001, "ROUGE_L": 0.15671950697655063, "BERTScore": 0.6982286767366064, "MoverScore": 0.5232427811488842}, "validation": {"QAAlignedF1Score (BERTScore)": 0.7699074227064933, "QAAlignedRecall (BERTScore)": 0.796040308626929, "QAAlignedPrecision (BERTScore)": 0.7463165726924668, "QAAlignedF1Score (MoverScore)": 0.5289634862785687, "QAAlignedRecall (MoverScore)": 0.5414913411061889, "QAAlignedPrecision (MoverScore)": 0.5176901326169612, "Bleu_1": 0.07018436293720098, "Bleu_2": 0.02847070920383554, "Bleu_3": 0.008932874125042595, "Bleu_4": 0.003731838069753556, "METEOR": 0.15797292956814057, "ROUGE_L": 0.10377178194108776, "BERTScore": 0.6428503760194714, "MoverScore": 0.5049178305206184}}
eval_pipeline/samples.test.hyp.paragraph.questions_answers.lmqg_qg_dequad.default.lmqg_mt5-base-dequad-ae.txt ADDED
The diff for this file is too large to render. See raw diff
 
eval_pipeline/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_dequad.default.lmqg_mt5-base-dequad-ae.txt ADDED
The diff for this file is too large to render. See raw diff