mt5-small-ruquad-qg / eval /metric.first.answer.sentence_answer.question.lmqg_qg_ruquad.default.json
{"validation": {"Bleu_1": 0.2949269778019981, "Bleu_2": 0.23082986140013634, "Bleu_3": 0.18510441304718853, "Bleu_4": 0.1501875465027891, "METEOR": 0.25635558139998266, "ROUGE_L": 0.3046824774358573, "BERTScore": 0.8354113963480693, "MoverScore": 0.6184491598342212}, "test": {"Bleu_1": 0.2992935131663399, "Bleu_2": 0.23502957980659914, "Bleu_3": 0.18938330949208243, "Bleu_4": 0.15490731659462117, "METEOR": 0.255728877241248, "ROUGE_L": 0.30544059419765696, "BERTScore": 0.8345718397400131, "MoverScore": 0.6157500088308799}}