asahi417 committed on
Commit
225d4e3
1 Parent(s): 5b4435f

model update

Browse files
Files changed (1) hide show
  1. README.md +116 -116
README.md CHANGED
@@ -31,37 +31,37 @@ model-index:
31
  type: default
32
  args: default
33
  metrics:
34
- - name: BLEU4
35
- type: bleu4
36
  value: 26.17
37
- - name: ROUGE-L
38
- type: rouge-l
39
  value: 53.85
40
- - name: METEOR
41
- type: meteor
42
  value: 27.07
43
- - name: BERTScore
44
- type: bertscore
45
  value: 91.0
46
- - name: MoverScore
47
- type: moverscore
48
  value: 64.99
49
- - name: QAAlignedF1Score (BERTScore) [Gold Answer]
50
  type: qa_aligned_f1_score_bertscore_gold_answer
51
  value: 95.54
52
- - name: QAAlignedRecall (BERTScore) [Gold Answer]
53
  type: qa_aligned_recall_bertscore_gold_answer
54
  value: 95.49
55
- - name: QAAlignedPrecision (BERTScore) [Gold Answer]
56
  type: qa_aligned_precision_bertscore_gold_answer
57
  value: 95.59
58
- - name: QAAlignedF1Score (MoverScore) [Gold Answer]
59
  type: qa_aligned_f1_score_moverscore_gold_answer
60
  value: 70.82
61
- - name: QAAlignedRecall (MoverScore) [Gold Answer]
62
  type: qa_aligned_recall_moverscore_gold_answer
63
  value: 70.54
64
- - name: QAAlignedPrecision (MoverScore) [Gold Answer]
65
  type: qa_aligned_precision_moverscore_gold_answer
66
  value: 71.13
67
  - task:
@@ -72,20 +72,20 @@ model-index:
72
  type: amazon
73
  args: amazon
74
  metrics:
75
- - name: BLEU4
76
- type: bleu4
77
  value: 0.06530369842068952
78
- - name: ROUGE-L
79
- type: rouge-l
80
  value: 0.25030985091008146
81
- - name: METEOR
82
- type: meteor
83
  value: 0.2229994442645732
84
- - name: BERTScore
85
- type: bertscore
86
  value: 0.9092814804525936
87
- - name: MoverScore
88
- type: moverscore
89
  value: 0.6086538514008419
90
  - task:
91
  name: Text2text Generation
@@ -95,20 +95,20 @@ model-index:
95
  type: new_wiki
96
  args: new_wiki
97
  metrics:
98
- - name: BLEU4
99
- type: bleu4
100
  value: 0.11118273173452982
101
- - name: ROUGE-L
102
- type: rouge-l
103
  value: 0.2967546690273089
104
- - name: METEOR
105
- type: meteor
106
  value: 0.27315087810722966
107
- - name: BERTScore
108
- type: bertscore
109
  value: 0.9322739617807421
110
- - name: MoverScore
111
- type: moverscore
112
  value: 0.6623000084761579
113
  - task:
114
  name: Text2text Generation
@@ -118,20 +118,20 @@ model-index:
118
  type: nyt
119
  args: nyt
120
  metrics:
121
- - name: BLEU4
122
- type: bleu4
123
  value: 0.08117757543966063
124
- - name: ROUGE-L
125
- type: rouge-l
126
  value: 0.25292097720734297
127
- - name: METEOR
128
- type: meteor
129
  value: 0.25254205113198686
130
- - name: BERTScore
131
- type: bertscore
132
  value: 0.9249009759439454
133
- - name: MoverScore
134
- type: moverscore
135
  value: 0.6406329128556304
136
  - task:
137
  name: Text2text Generation
@@ -141,20 +141,20 @@ model-index:
141
  type: reddit
142
  args: reddit
143
  metrics:
144
- - name: BLEU4
145
- type: bleu4
146
  value: 0.059525104157825456
147
- - name: ROUGE-L
148
- type: rouge-l
149
  value: 0.22365090580055863
150
- - name: METEOR
151
- type: meteor
152
  value: 0.21499800504546457
153
- - name: BERTScore
154
- type: bertscore
155
  value: 0.9095144685254328
156
- - name: MoverScore
157
- type: moverscore
158
  value: 0.6059332247878408
159
  - task:
160
  name: Text2text Generation
@@ -164,20 +164,20 @@ model-index:
164
  type: books
165
  args: books
166
  metrics:
167
- - name: BLEU4
168
- type: bleu4
169
  value: 0.006278914808207679
170
- - name: ROUGE-L
171
- type: rouge-l
172
  value: 0.12368226019088967
173
- - name: METEOR
174
- type: meteor
175
  value: 0.11576293675813865
176
- - name: BERTScore
177
- type: bertscore
178
  value: 0.8807110440044503
179
- - name: MoverScore
180
- type: moverscore
181
  value: 0.5555905941686486
182
  - task:
183
  name: Text2text Generation
@@ -187,20 +187,20 @@ model-index:
187
  type: electronics
188
  args: electronics
189
  metrics:
190
- - name: BLEU4
191
- type: bleu4
192
  value: 0.00866799444965211
193
- - name: ROUGE-L
194
- type: rouge-l
195
  value: 0.1601628874804186
196
- - name: METEOR
197
- type: meteor
198
  value: 0.15348605312210778
199
- - name: BERTScore
200
- type: bertscore
201
  value: 0.8783386920680519
202
- - name: MoverScore
203
- type: moverscore
204
  value: 0.5634845371093992
205
  - task:
206
  name: Text2text Generation
@@ -210,20 +210,20 @@ model-index:
210
  type: grocery
211
  args: grocery
212
  metrics:
213
- - name: BLEU4
214
- type: bleu4
215
  value: 0.00528043272450429
216
- - name: ROUGE-L
217
- type: rouge-l
218
  value: 0.12343711316491492
219
- - name: METEOR
220
- type: meteor
221
  value: 0.15133496445452477
222
- - name: BERTScore
223
- type: bertscore
224
  value: 0.8778951253890991
225
- - name: MoverScore
226
- type: moverscore
227
  value: 0.5701949938103265
228
  - task:
229
  name: Text2text Generation
@@ -233,20 +233,20 @@ model-index:
233
  type: movies
234
  args: movies
235
  metrics:
236
- - name: BLEU4
237
- type: bleu4
238
  value: 1.0121579426501661e-06
239
- - name: ROUGE-L
240
- type: rouge-l
241
  value: 0.12508697028506718
242
- - name: METEOR
243
- type: meteor
244
  value: 0.11862284941640638
245
- - name: BERTScore
246
- type: bertscore
247
  value: 0.8748829724726739
248
- - name: MoverScore
249
- type: moverscore
250
  value: 0.5528899173535703
251
  - task:
252
  name: Text2text Generation
@@ -256,20 +256,20 @@ model-index:
256
  type: restaurants
257
  args: restaurants
258
  metrics:
259
- - name: BLEU4
260
- type: bleu4
261
  value: 1.1301750984972448e-06
262
- - name: ROUGE-L
263
- type: rouge-l
264
  value: 0.13083168975354642
265
- - name: METEOR
266
- type: meteor
267
  value: 0.12419733006916912
268
- - name: BERTScore
269
- type: bertscore
270
  value: 0.8797711839570719
271
- - name: MoverScore
272
- type: moverscore
273
  value: 0.5542757411268555
274
  - task:
275
  name: Text2text Generation
@@ -279,20 +279,20 @@ model-index:
279
  type: tripadvisor
280
  args: tripadvisor
281
  metrics:
282
- - name: BLEU4
283
- type: bleu4
284
  value: 8.380171318718442e-07
285
- - name: ROUGE-L
286
- type: rouge-l
287
  value: 0.1402922852924756
288
- - name: METEOR
289
- type: meteor
290
  value: 0.1372146070365174
291
- - name: BERTScore
292
- type: bertscore
293
  value: 0.8891002409937424
294
- - name: MoverScore
295
- type: moverscore
296
  value: 0.5604572211470809
297
  ---
298
 
31
  type: default
32
  args: default
33
  metrics:
34
+ - name: BLEU4 (Question Generation)
35
+ type: bleu4_question_generation
36
  value: 26.17
37
+ - name: ROUGE-L (Question Generation)
38
+ type: rouge_l_question_generation
39
  value: 53.85
40
+ - name: METEOR (Question Generation)
41
+ type: meteor_question_generation
42
  value: 27.07
43
+ - name: BERTScore (Question Generation)
44
+ type: bertscore_question_generation
45
  value: 91.0
46
+ - name: MoverScore (Question Generation)
47
+ type: moverscore_question_generation
48
  value: 64.99
49
+ - name: QAAlignedF1Score-BERTScore (Gold Answer)
50
  type: qa_aligned_f1_score_bertscore_gold_answer
51
  value: 95.54
52
+ - name: QAAlignedRecall-BERTScore (Gold Answer)
53
  type: qa_aligned_recall_bertscore_gold_answer
54
  value: 95.49
55
+ - name: QAAlignedPrecision-BERTScore (Gold Answer)
56
  type: qa_aligned_precision_bertscore_gold_answer
57
  value: 95.59
58
+ - name: QAAlignedF1Score-MoverScore (Gold Answer)
59
  type: qa_aligned_f1_score_moverscore_gold_answer
60
  value: 70.82
61
+ - name: QAAlignedRecall-MoverScore (Gold Answer)
62
  type: qa_aligned_recall_moverscore_gold_answer
63
  value: 70.54
64
+ - name: QAAlignedPrecision-MoverScore (Gold Answer)
65
  type: qa_aligned_precision_moverscore_gold_answer
66
  value: 71.13
67
  - task:
72
  type: amazon
73
  args: amazon
74
  metrics:
75
+ - name: BLEU4 (Question Generation)
76
+ type: bleu4_question_generation
77
  value: 0.06530369842068952
78
+ - name: ROUGE-L (Question Generation)
79
+ type: rouge_l_question_generation
80
  value: 0.25030985091008146
81
+ - name: METEOR (Question Generation)
82
+ type: meteor_question_generation
83
  value: 0.2229994442645732
84
+ - name: BERTScore (Question Generation)
85
+ type: bertscore_question_generation
86
  value: 0.9092814804525936
87
+ - name: MoverScore (Question Generation)
88
+ type: moverscore_question_generation
89
  value: 0.6086538514008419
90
  - task:
91
  name: Text2text Generation
95
  type: new_wiki
96
  args: new_wiki
97
  metrics:
98
+ - name: BLEU4 (Question Generation)
99
+ type: bleu4_question_generation
100
  value: 0.11118273173452982
101
+ - name: ROUGE-L (Question Generation)
102
+ type: rouge_l_question_generation
103
  value: 0.2967546690273089
104
+ - name: METEOR (Question Generation)
105
+ type: meteor_question_generation
106
  value: 0.27315087810722966
107
+ - name: BERTScore (Question Generation)
108
+ type: bertscore_question_generation
109
  value: 0.9322739617807421
110
+ - name: MoverScore (Question Generation)
111
+ type: moverscore_question_generation
112
  value: 0.6623000084761579
113
  - task:
114
  name: Text2text Generation
118
  type: nyt
119
  args: nyt
120
  metrics:
121
+ - name: BLEU4 (Question Generation)
122
+ type: bleu4_question_generation
123
  value: 0.08117757543966063
124
+ - name: ROUGE-L (Question Generation)
125
+ type: rouge_l_question_generation
126
  value: 0.25292097720734297
127
+ - name: METEOR (Question Generation)
128
+ type: meteor_question_generation
129
  value: 0.25254205113198686
130
+ - name: BERTScore (Question Generation)
131
+ type: bertscore_question_generation
132
  value: 0.9249009759439454
133
+ - name: MoverScore (Question Generation)
134
+ type: moverscore_question_generation
135
  value: 0.6406329128556304
136
  - task:
137
  name: Text2text Generation
141
  type: reddit
142
  args: reddit
143
  metrics:
144
+ - name: BLEU4 (Question Generation)
145
+ type: bleu4_question_generation
146
  value: 0.059525104157825456
147
+ - name: ROUGE-L (Question Generation)
148
+ type: rouge_l_question_generation
149
  value: 0.22365090580055863
150
+ - name: METEOR (Question Generation)
151
+ type: meteor_question_generation
152
  value: 0.21499800504546457
153
+ - name: BERTScore (Question Generation)
154
+ type: bertscore_question_generation
155
  value: 0.9095144685254328
156
+ - name: MoverScore (Question Generation)
157
+ type: moverscore_question_generation
158
  value: 0.6059332247878408
159
  - task:
160
  name: Text2text Generation
164
  type: books
165
  args: books
166
  metrics:
167
+ - name: BLEU4 (Question Generation)
168
+ type: bleu4_question_generation
169
  value: 0.006278914808207679
170
+ - name: ROUGE-L (Question Generation)
171
+ type: rouge_l_question_generation
172
  value: 0.12368226019088967
173
+ - name: METEOR (Question Generation)
174
+ type: meteor_question_generation
175
  value: 0.11576293675813865
176
+ - name: BERTScore (Question Generation)
177
+ type: bertscore_question_generation
178
  value: 0.8807110440044503
179
+ - name: MoverScore (Question Generation)
180
+ type: moverscore_question_generation
181
  value: 0.5555905941686486
182
  - task:
183
  name: Text2text Generation
187
  type: electronics
188
  args: electronics
189
  metrics:
190
+ - name: BLEU4 (Question Generation)
191
+ type: bleu4_question_generation
192
  value: 0.00866799444965211
193
+ - name: ROUGE-L (Question Generation)
194
+ type: rouge_l_question_generation
195
  value: 0.1601628874804186
196
+ - name: METEOR (Question Generation)
197
+ type: meteor_question_generation
198
  value: 0.15348605312210778
199
+ - name: BERTScore (Question Generation)
200
+ type: bertscore_question_generation
201
  value: 0.8783386920680519
202
+ - name: MoverScore (Question Generation)
203
+ type: moverscore_question_generation
204
  value: 0.5634845371093992
205
  - task:
206
  name: Text2text Generation
210
  type: grocery
211
  args: grocery
212
  metrics:
213
+ - name: BLEU4 (Question Generation)
214
+ type: bleu4_question_generation
215
  value: 0.00528043272450429
216
+ - name: ROUGE-L (Question Generation)
217
+ type: rouge_l_question_generation
218
  value: 0.12343711316491492
219
+ - name: METEOR (Question Generation)
220
+ type: meteor_question_generation
221
  value: 0.15133496445452477
222
+ - name: BERTScore (Question Generation)
223
+ type: bertscore_question_generation
224
  value: 0.8778951253890991
225
+ - name: MoverScore (Question Generation)
226
+ type: moverscore_question_generation
227
  value: 0.5701949938103265
228
  - task:
229
  name: Text2text Generation
233
  type: movies
234
  args: movies
235
  metrics:
236
+ - name: BLEU4 (Question Generation)
237
+ type: bleu4_question_generation
238
  value: 1.0121579426501661e-06
239
+ - name: ROUGE-L (Question Generation)
240
+ type: rouge_l_question_generation
241
  value: 0.12508697028506718
242
+ - name: METEOR (Question Generation)
243
+ type: meteor_question_generation
244
  value: 0.11862284941640638
245
+ - name: BERTScore (Question Generation)
246
+ type: bertscore_question_generation
247
  value: 0.8748829724726739
248
+ - name: MoverScore (Question Generation)
249
+ type: moverscore_question_generation
250
  value: 0.5528899173535703
251
  - task:
252
  name: Text2text Generation
256
  type: restaurants
257
  args: restaurants
258
  metrics:
259
+ - name: BLEU4 (Question Generation)
260
+ type: bleu4_question_generation
261
  value: 1.1301750984972448e-06
262
+ - name: ROUGE-L (Question Generation)
263
+ type: rouge_l_question_generation
264
  value: 0.13083168975354642
265
+ - name: METEOR (Question Generation)
266
+ type: meteor_question_generation
267
  value: 0.12419733006916912
268
+ - name: BERTScore (Question Generation)
269
+ type: bertscore_question_generation
270
  value: 0.8797711839570719
271
+ - name: MoverScore (Question Generation)
272
+ type: moverscore_question_generation
273
  value: 0.5542757411268555
274
  - task:
275
  name: Text2text Generation
279
  type: tripadvisor
280
  args: tripadvisor
281
  metrics:
282
+ - name: BLEU4 (Question Generation)
283
+ type: bleu4_question_generation
284
  value: 8.380171318718442e-07
285
+ - name: ROUGE-L (Question Generation)
286
+ type: rouge_l_question_generation
287
  value: 0.1402922852924756
288
+ - name: METEOR (Question Generation)
289
+ type: meteor_question_generation
290
  value: 0.1372146070365174
291
+ - name: BERTScore (Question Generation)
292
+ type: bertscore_question_generation
293
  value: 0.8891002409937424
294
+ - name: MoverScore (Question Generation)
295
+ type: moverscore_question_generation
296
  value: 0.5604572211470809
297
  ---
298