mt5-small-esquad-qg-ae / eval / metric.first.answer.paragraph.questions_answers.lmqg_qg_esquad.default.json
{"test": {"QAAlignedF1Score (BERTScore)": 0.7905906899103425, "QAAlignedF1Score (MoverScore)": 0.5449111789219835, "QAAlignedRecall (BERTScore)": 0.8193740462603651, "QAAlignedPrecision (BERTScore)": 0.7645582065809101, "QAAlignedRecall (MoverScore)": 0.562123937377997, "QAAlignedPrecision (MoverScore)": 0.5295862912867637, "Bleu_1": 0.10523364263702538, "Bleu_2": 0.05186327661896085, "Bleu_3": 0.02823473237375032, "Bleu_4": 0.017286670225730407, "METEOR": 0.21823975274687754, "ROUGE_L": 0.14860059176268736, "BERTScore": 0.6893117264300432, "MoverScore": 0.5158691179047461}, "validation": {"QAAlignedF1Score (BERTScore)": 0.8086047957132404, "QAAlignedF1Score (MoverScore)": 0.5583057470617592, "QAAlignedRecall (BERTScore)": 0.8123977244780864, "QAAlignedPrecision (BERTScore)": 0.8054143989666154, "QAAlignedRecall (MoverScore)": 0.5573398601761835, "QAAlignedPrecision (MoverScore)": 0.5596609610126972, "Bleu_1": 0.2796299558796152, "Bleu_2": 0.17059700073713194, "Bleu_3": 0.10288457592152213, "Bleu_4": 0.06616948942055641, "METEOR": 0.26817201986338535, "ROUGE_L": 0.26018900226073527, "BERTScore": 0.7722515311861707, "MoverScore": 0.5504735671494859}}