asahi417 committed
Commit
4b46d81
1 Parent(s): 6b8cd38

model update

README.md CHANGED
@@ -32,6 +32,18 @@ model-index:
   - name: BLEU4 (Question Answering)
     type: bleu4_question_answering
     value: 10.81
+  - name: ROUGE-L (Question Answering)
+    type: rouge_l_question_answering
+    value: 25.75
+  - name: METEOR (Question Answering)
+    type: meteor_question_answering
+    value: 20.96
+  - name: BERTScore (Question Answering)
+    type: bertscore_question_answering
+    value: 87.27
+  - name: MoverScore (Question Answering)
+    type: moverscore_question_answering
+    value: 67.79
   - name: AnswerF1Score (Question Answering)
     type: answer_f1_score__question_answering
     value: 39.24
@@ -83,10 +95,14 @@ output = pipe("question: En quelle année a-t-on trouvé trace d'un haut fournea
 |:-----------------|--------:|:--------|:-----------------------------------------------------------------|
 | AnswerExactMatch | 22.43   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
 | AnswerF1Score    | 39.24   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
+| BERTScore        | 87.27   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
 | Bleu_1           | 18.63   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
 | Bleu_2           | 15.15   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
 | Bleu_3           | 12.76   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
 | Bleu_4           | 10.81   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
+| METEOR           | 20.96   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
+| MoverScore       | 67.79   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
+| ROUGE_L          | 25.75   | default | [lmqg/qg_frquad](https://huggingface.co/datasets/lmqg/qg_frquad) |
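The second hunk header carries over a truncated line from the README's usage snippet (`output = pipe("question: En quelle année ...`), which indicates the checkpoint is queried through a `transformers` pipeline. Below is a minimal sketch of that pattern; the checkpoint name, the `text2text-generation` task, and the `question: ..., context: ...` prompt format are assumptions based on lmqg's usual QA model cards rather than on this diff, so check them against the full README before relying on them.

```python
from transformers import pipeline


def answer_question(model_id: str, question: str, paragraph: str) -> str:
    """Run an lmqg-style seq2seq QA checkpoint through a transformers pipeline.

    The "question: ..., context: ..." prompt format is inferred from the
    truncated `output = pipe("question: ...")` line in the hunk header above;
    confirm it against the README of the checkpoint you actually load.
    """
    # lmqg QA checkpoints are text-to-text models, so the generation task applies.
    pipe = pipeline("text2text-generation", model=model_id)
    result = pipe(f"question: {question}, context: {paragraph}")
    return result[0]["generated_text"]


# Example call; the checkpoint name is a placeholder, not taken from this commit.
# print(answer_question("lmqg/mt5-small-frquad-qa", "Quelle année ... ?", "Le paragraphe ..."))
```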
eval/metric.first.answer.paragraph_question.answer.lmqg_qg_frquad.default.json CHANGED
@@ -1 +1 @@
- {"validation": {"Bleu_1": 0.19846395531505504, "Bleu_2": 0.16438125648808355, "Bleu_3": 0.14011480904266574, "Bleu_4": 0.12095514167776761, "AnswerF1Score": 37.86047708898501, "AnswerExactMatch": 16.499372647427855}, "test": {"Bleu_1": 0.18625190128059535, "Bleu_2": 0.15147379408861977, "Bleu_3": 0.12758082981388957, "Bleu_4": 0.10805168198813628, "AnswerF1Score": 39.237140683394436, "AnswerExactMatch": 22.427854454203263}}
+ {"validation": {"Bleu_1": 0.19846395531505504, "Bleu_2": 0.16438125648808355, "Bleu_3": 0.14011480904266574, "Bleu_4": 0.12095514167776761, "AnswerF1Score": 37.86047708898501, "AnswerExactMatch": 16.499372647427855, "METEOR": 0.2064317193398918, "ROUGE_L": 0.25360764971725297, "BERTScore": 0.8696774432956799, "MoverScore": 0.6634372689334539}, "test": {"Bleu_1": 0.18625190128059535, "Bleu_2": 0.15147379408861977, "Bleu_3": 0.12758082981388957, "Bleu_4": 0.10805168198813628, "AnswerF1Score": 39.237140683394436, "AnswerExactMatch": 22.427854454203263, "METEOR": 0.20958433174855287, "ROUGE_L": 0.2575338552201397, "BERTScore": 0.8727350258729988, "MoverScore": 0.6778583344478704}}