mt5-small-koquad-qag / eval / metric.first.answer.paragraph.questions_answers.lmqg_qag_koquad.default.json
{"validation": {"Bleu_1": 0.20720502657508894, "Bleu_2": 0.12973359463297998, "Bleu_3": 0.07736389639682253, "Bleu_4": 0.05002422519479112, "METEOR": 0.20084513698908732, "ROUGE_L": 0.2328243351207967, "BERTScore": 0.7121946530416607, "MoverScore": 0.6559541024331569, "QAAlignedF1Score (BERTScore)": 0.7675924272601385, "QAAlignedRecall (BERTScore)": 0.745755535757304, "QAAlignedPrecision (BERTScore)": 0.7914428979179097, "QAAlignedF1Score (MoverScore)": 0.790290732358258, "QAAlignedRecall (MoverScore)": 0.7572321940950104, "QAAlignedPrecision (MoverScore)": 0.8282231121807863}, "test": {"Bleu_1": 0.040932977657776856, "Bleu_2": 0.020861251455809058, "Bleu_3": 0.011692083309161437, "Bleu_4": 0.007073299521744108, "METEOR": 0.129459498216136, "ROUGE_L": 0.0806453671612055, "BERTScore": 0.5839372786489063, "MoverScore": 0.5881119031553251, "QAAlignedF1Score (BERTScore)": 0.7423411454198516, "QAAlignedRecall (BERTScore)": 0.7420430708538479, "QAAlignedPrecision (BERTScore)": 0.742871617137688, "QAAlignedF1Score (MoverScore)": 0.7505986412090764, "QAAlignedRecall (MoverScore)": 0.750358323011417, "QAAlignedPrecision (MoverScore)": 0.7514380757963297}}