aslawliet committed
Commit: 7582e67
Parent: 996c740

Update README.md

Files changed (1):
  1. README.md +8 -133
README.md CHANGED
@@ -47,9 +47,9 @@ size_categories:
  "glue_wnli": 600,
  "hellaswag": 35941,
  "huggingface_xsum": 184162,
- "imdb_reviews_plain_text": null,
+ "imdb_reviews_plain_text": 22725,
  "lambada": 4467,
- "math_dataset_algebra__linear_1d": null,
+ "math_dataset_algebra__linear_1d": 1814247,
  "multi_news": 40646,
  "natural_questions_open": 79342,
  "newsroom": 900966,
@@ -59,20 +59,20 @@ size_categories:
  "para_crawl_enes": 27430,
  "paws_wiki": 44831,
  "piqa": 14594,
- "quac": null,
- "samsum": null,
- "sentiment140": null,
- "snli": null,
+ "quac": 75448,
+ "samsum": 13232,
+ "sentiment140": 1451736,
+ "snli": 498328,
  "squad_v1_1": null,
  "squad_v2_0": null,
- "story_cloze_2016": null,
+ "story_cloze_2016": 1538,
  "super_glue_cb": 165,
  "super_glue_copa": 336,
  "super_glue_multirc": 24349,
  "super_glue_record": 90486,
  "super_glue_rte": 2064,
  "super_glue_wic": 4783,
- "super_glue_wsc_fixed": null,
+ "super_glue_wsc_fixed": 440,
  "trec": 4679,
  "trivia_qa_rc": null,
  "true_case": 26581,
@@ -88,129 +88,4 @@ size_categories:
  "wmt16_translate_tr-en": 186016,
  "yelp_polarity_reviews": 507373
  }
- ```
- ```python
- text_classification = [
-     "ag_news_subset",
-     "glue_cola",
-     "glue_sst2",
-     "imdb_reviews_plain_text",
-     "yelp_polarity_reviews"
- ]
-
- question_answering = [
-     "ai2_arc_ARC-Challenge",
-     "ai2_arc_ARC-Easy",
-     "bool_q",
-     "coqa",
-     "cosmos_qa",
-     "drop",
-     "natural_questions_open",
-     "openbookqa",
-     "quac",
-     "squad_v1_1",
-     "squad_v2_0",
-     "trivia_qa_rc"
- ]
-
- text_generation = [
-     "aeslc",
-     "cnn_dailymail",
-     "gem_common_gen",
-     "gem_dart",
-     "gem_e2e_nlg",
-     "gem_web_nlg_en",
-     "gem_wiki_lingua_english_en",
-     "gigaword",
-     "huggingface_xsum",
-     "lambada",
-     "multi_news",
-     "newsroom",
-     "samsum"
- ]
-
- translation = [
-     "wmt14_translate_fr-en",
-     "wmt16_translate_cs-en",
-     "wmt16_translate_de-en",
-     "wmt16_translate_fi-en",
-     "wmt16_translate_ro-en",
-     "wmt16_translate_ru-en",
-     "wmt16_translate_tr-en"
- ]
-
- sentiment_analysis = [
-     "sentiment140"
- ]
-
- textual_entailment = [
-     "anli_r1",
-     "anli_r2",
-     "anli_r3",
-     "glue_mnli",
-     "glue_rte",
-     "snli",
-     "super_glue_cb",
-     "super_glue_copa",
-     "super_glue_rte"
- ]
-
- paraphrase_detection = [
-     "glue_mrpc",
-     "glue_qqp",
-     "paws_wiki"
- ]
-
- commonsense_reasoning = [
-     "hellaswag",
-     "piqa",
-     "super_glue_multirc",
-     "super_glue_record",
-     "super_glue_wic",
-     "super_glue_wsc_fixed",
-     "winogrande"
- ]
-
- textual_similarity = [
-     "glue_stsb"
- ]
-
- named_entity_recognition = [
-     "glue_wnli"
- ]
-
- text_correction = [
-     "fix_punct",
-     "true_case"
- ]
-
- text_segmentation = [
-     "word_segment"
- ]
-
- argument_mining = [
-     "opinion_abstracts_idebate",
-     "opinion_abstracts_rotten_tomatoes"
- ]
-
- machine_reading_comprehension = [
-     "glue_qnli"
- ]
-
- text_summarization = [
-     "trec"
- ]
-
- language_modelling = [
-     "story_cloze_2016"
- ]
-
- math_problem_solving = [
-     "math_dataset_algebra__linear_1d",
-     "unified_qa_science_inst"
- ]
-
- cross_lingual_information_retrieval = [
-     "para_crawl_enes"
- ]
  ```
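The `+` lines above fill in per-task example counts that were previously `null` in `size_categories`. A minimal sketch of how such counts could be recomputed with the `datasets` library, assuming the repository exposes one configuration per task name; the repo id and config names below are hypothetical, not confirmed by this commit:

```python
# Sketch: recompute per-task example counts like the size_categories values above.
# The repo id "aslawliet/flan-v2" and per-task config names are assumptions.
from datasets import load_dataset

tasks = ["quac", "samsum", "sentiment140", "snli", "story_cloze_2016"]

counts = {}
for task in tasks:
    ds = load_dataset("aslawliet/flan-v2", task, split="train")  # hypothetical repo/config
    counts[task] = ds.num_rows  # number of examples in the loaded split

print(counts)  # e.g. {"quac": 75448, "samsum": 13232, ...}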
 
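The removed ```python block grouped every task under a category (text_classification, question_answering, and so on). If that grouping is still needed downstream, the same lists can be inverted into a single task-to-category lookup; a minimal sketch using two of the deleted lists, with the remaining categories elided:

```python
# Sketch: invert category lists like the ones deleted in this commit
# into a single task -> category lookup. Only two categories are shown;
# the other lists from the removed block would be added the same way.
categories = {
    "text_classification": [
        "ag_news_subset",
        "glue_cola",
        "glue_sst2",
        "imdb_reviews_plain_text",
        "yelp_polarity_reviews",
    ],
    "sentiment_analysis": [
        "sentiment140",
    ],
}

task_to_category = {
    task: category
    for category, tasks in categories.items()
    for task in tasks
}

assert task_to_category["sentiment140"] == "sentiment_analysis"
print(task_to_category["glue_cola"])  # text_classification
```

A dict keyed by task name avoids scanning every category list when tagging examples from the mixture.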