asahi417 committed
Commit fe65532
1 Parent(s): 4cef404
eval_pipeline/metric.first.answer.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-base-ruquad-ae.json ADDED
@@ -0,0 +1 @@
+ {"test": {"QAAlignedF1Score (BERTScore)": 0.7703143025575259, "QAAlignedRecall (BERTScore)": 0.811734422231678, "QAAlignedPrecision (BERTScore)": 0.7343977774370408, "QAAlignedF1Score (MoverScore)": 0.5560888122051312, "QAAlignedRecall (MoverScore)": 0.5838909886435096, "QAAlignedPrecision (MoverScore)": 0.5326916036192664, "Bleu_1": 0.09598969129931906, "Bleu_2": 0.056414178652289884, "Bleu_3": 0.03609459807595688, "Bleu_4": 0.02308504634418144, "METEOR": 0.2130643175268161, "ROUGE_L": 0.14443020338305196, "BERTScore": 0.6617029125131955, "MoverScore": 0.5154646575680376}, "validation": {"QAAlignedF1Score (BERTScore)": 0.7707893329193379, "QAAlignedRecall (BERTScore)": 0.8128786736754525, "QAAlignedPrecision (BERTScore)": 0.7342287774860152, "QAAlignedF1Score (MoverScore)": 0.557087409262424, "QAAlignedRecall (MoverScore)": 0.5856287306166442, "QAAlignedPrecision (MoverScore)": 0.533092559547624, "Bleu_1": 0.0949684209798848, "Bleu_2": 0.0555531243167928, "Bleu_3": 0.03560257686687513, "Bleu_4": 0.023153585064817527, "METEOR": 0.21295517156421126, "ROUGE_L": 0.14374077283791178, "BERTScore": 0.6605021815420979, "MoverScore": 0.5151732256618153}}
eval_pipeline/samples.test.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-base-ruquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff
 
eval_pipeline/samples.validation.hyp.paragraph.questions_answers.lmqg_qg_ruquad.default.lmqg_mt5-base-ruquad-ae.txt ADDED
The diff for this file is too large to render. See raw diff
 
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
- oid sha256:6487bd9ac3c24d1255cbe718e5f5c5830f32ac84ee38b237fd39f8e33ca995cd
- size 2329628621
+ oid sha256:3ab8d62e7115f71ae645cd87625ce40e29e94fa79c6cffb448c063f0d45b5a81
+ size 2329630901
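pytorch_model.bin is stored as a Git LFS pointer, so this diff only swaps the pointer's oid (the SHA-256 of the real file) and size. A minimal sketch for checking that a pulled copy of the weights matches the new pointer; the path and the chunk size are assumptions, while the hash and byte count come from the diff above:

```python
import hashlib
import os

# Expected values from the new LFS pointer in this commit.
EXPECTED_SHA256 = "3ab8d62e7115f71ae645cd87625ce40e29e94fa79c6cffb448c063f0d45b5a81"
EXPECTED_SIZE = 2329630901

path = "pytorch_model.bin"  # assumed local path after `git lfs pull`

# A size mismatch usually means the pointer file itself was checked out.
assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch: did git lfs pull run?"

# Hash the file in 1 MiB chunks to avoid loading ~2.3 GB into memory.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer in this commit")
```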