---
license: cc-by-4.0
task_categories:
- text-generation
- text-classification
- token-classification
- question-answering
- zero-shot-classification
- translation
- summarization
language:
- en
size_categories:
- 10M<n<100M
---
# FLAN-2021 Datasets
- **FLAN-2021 -> 70 tasks** (per-task example counts in the JSON block below; a usage sketch follows the block)
```json
{
"ag_news_subset": 108497,
"ai2_arc/ARC-Challenge": 829,
"ai2_arc/ARC-Easy": 1927,
"aeslc": 13187,
"anli/r1": 15361,
"anli/r2": 41133,
"anli/r3": 91048,
"bool_q": 8343,
"cnn_dailymail": 259607,
"coqa": 6456,
"cosmos_qa": 22996,
"definite_pronoun_resolution": 1079,
"drop": 70045,
"fix_punct": 25690,
"gem/common_gen": 60936,
"gem/dart": 56724,
"gem/e2e_nlg": 30337,
"gem/web_nlg_en": 31899,
"gem/wiki_lingua_english_en": 89452,
"gigaword": 1853123,
"glue/cola": 7594,
"glue/mnli": 711413,
"glue/mrpc": 3117,
"glue/qnli": 94453,
"glue/qqp": 329860,
"glue/sst2": 61011,
"glue/stsb": 5085,
"glue/wnli": 600,
"hellaswag": 35941,
"xsum": 184162,
"imdb_reviews/plain_text": 22725,
"lambada": 4467,
"math_dataset/algebra__linear_1d": 1814247,
"multi_news": 40646,
"natural_questions_open": 79342,
"newsroom": 900966,
"openbookqa": 4471,
"opinion_abstracts/idebate": 1554,
"opinion_abstracts/rotten_tomatoes": 2908,
"para_crawl_enes": 27430,
"paws_wiki": 44831,
"piqa": 14594,
"quac": 75448,
"samsum": 13232,
"sentiment140": 1451736,
"snli": 498328,
"squad/v1": 79305,
"squad/v2": 117979,
"story_cloze": 1538,
"super_glue/cb": 165,
"super_glue/copa": 336,
"super_glue/multirc": 24349,
"super_glue/record": 90486,
"super_glue/rte": 2064,
"super_glue/wic": 4783,
"super_glue/wsc": 440,
"trec": 4679,
"trivia_qa": 79623,
"true_case": 26581,
"unified_qa_science_inst": 560,
"winogrande": 36218,
"word_segment": 27256,
"wmt14_translate/fr-en": 9070285,
"wmt16_translate/cs-en": 9066896,
"wmt16_translate/de-en": 4124373,
"wmt16_translate/fi-en": 1880481,
"wmt16_translate/ro-en": 553110,
"wmt16_translate/ru-en": 2280872,
"wmt16_translate/tr-en": 186016,
"yelp_polarity_reviews": 507373
}
```
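
As a convenience, here is a minimal Python sketch for working with the counts above, assuming they are saved locally as `task_counts.json` (a hypothetical filename; the file is just the JSON object shown above). It computes the total number of examples and the per-task proportions, which is useful when weighting tasks in a training mixture.

```python
import json

# Load the per-task example counts shown above.
# "task_counts.json" is a hypothetical filename used for illustration.
with open("task_counts.json") as f:
    counts = json.load(f)

# Total examples across all 70 tasks.
total = sum(counts.values())
print(f"{len(counts)} tasks, {total:,} examples in total")

# Show the five largest tasks and their share of the mixture.
for task, n in sorted(counts.items(), key=lambda kv: kv[1], reverse=True)[:5]:
    print(f"{task:40s} {n:>10,} ({n / total:.2%})")
```

Note that the raw counts are heavily skewed toward the WMT translation tasks, so proportional sampling would be dominated by them; mixture-building pipelines typically cap or temperature-scale per-task weights rather than using the raw counts directly.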