t5-base-subjqa-books-qg / eval / metric.middle.sentence.sentence_answer.question.lmqg_qg_subjqa.books.json
{"validation": {"Bleu_1": 0.232245681381512, "Bleu_2": 0.15415795267798185, "Bleu_3": 0.0749251820243315, "Bleu_4": 0.042915624209149565, "METEOR": 0.22329435997820107, "ROUGE_L": 0.2509073353813845, "BERTScore": 0.9281497250546465, "MoverScore": 0.6398015424796643}, "test": {"Bleu_1": 0.2057541412378328, "Bleu_2": 0.11551574706874243, "Bleu_3": 0.03740365713101154, "Bleu_4": 3.0899935818563215e-06, "METEOR": 0.20487517127980073, "ROUGE_L": 0.21621888099871492, "BERTScore": 0.9241682649907017, "MoverScore": 0.6219601654488888}}