model update
README.md
CHANGED
@@ -31,20 +31,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 21.65
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 48.95
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 23.83
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 90.01
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 62.75
   - task:
       name: Text2text Generation
@@ -54,20 +54,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 9.242783121165897e-12
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.01556150764938016
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.04809700451843158
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.7353078946893743
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.5036973829954939
   - task:
       name: Text2text Generation
@@ -77,20 +77,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 0.0059191752064594125
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.05208940592236566
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.06021086135293597
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.7494422899749911
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.5062373132800192
   - task:
       name: Text2text Generation
@@ -100,20 +100,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 0.0171464639522496
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.1583673053928925
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.08244973027319356
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.7291012183458674
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.509610854598101
   - task:
       name: Text2text Generation
@@ -123,20 +123,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 0.005438910607183992
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.05010570221421983
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.05890828426558759
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.7260160158030385
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.5023119088393686
   - task:
       name: Text2text Generation
@@ -146,20 +146,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 4.4114578660129224e-08
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.06084267343290677
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.005149267426183168
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.6608093198082075
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.46526108687696893
   - task:
       name: Text2text Generation
@@ -169,20 +169,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 1.4750917137316939e-12
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.0006466767450454226
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.007310046912436679
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.6634288882769679
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.4586124640357038
   - task:
       name: Text2text Generation
@@ -192,20 +192,20 @@ model-index:
       type: default
       args: default
     metrics:
-    - name: BLEU4
-      type:
+    - name: BLEU4 (Question Generation)
+      type: bleu4_question_generation
       value: 4.229109829516021e-12
-    - name: ROUGE-L
-      type:
+    - name: ROUGE-L (Question Generation)
+      type: rouge_l_question_generation
       value: 0.009881091250723615
-    - name: METEOR
-      type:
+    - name: METEOR (Question Generation)
+      type: meteor_question_generation
       value: 0.017796529053904556
-    - name: BERTScore
-      type:
+    - name: BERTScore (Question Generation)
+      type: bertscore_question_generation
       value: 0.7089446693028568
-    - name: MoverScore
-      type:
+    - name: MoverScore (Question Generation)
+      type: moverscore_question_generation
       value: 0.49098728551715626
 ---
 
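The substance of the commit: each metric's previously empty `type:` field now carries a machine-readable identifier (e.g. `bleu4_question_generation`), and each `name` gains a "(Question Generation)" suffix. Presumably this is what makes the `model-index` block usable, since the Hub parses it into structured evaluation results keyed by metric type. A minimal sketch of reading the resulting metadata, assuming `huggingface_hub` is installed; the repo id below is a placeholder, not the actual model:

```python
from huggingface_hub import ModelCard

# Placeholder repo id: substitute the actual model repository.
card = ModelCard.load("your-org/your-question-generation-model")

# With the metric types filled in, the model-index front matter is
# exposed as a list of EvalResult objects.
for r in card.data.eval_results:
    print(f"{r.metric_name}: {r.metric_value} ({r.metric_type})")
```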