Datasets:
File size: 1,849 Bytes
---
license: mit
---
# Task Name
- **FLAN-2021 — 70 datasets**
```json
{
"ag_news_subset": null,
"ai2_arc_ARC-Challenge": null,
"ai2_arc_ARC-Easy": null,
"aeslc": null,
"anli_r1": null,
"anli_r2": null,
"anli_r3": null,
"bool_q": null,
"cnn_dailymail": null,
"coqa": null,
"cosmos_qa": null,
"definite_pronoun_resolution": null,
"drop": null,
"fix_punct": null,
"gem_common_gen": null,
"gem_dart": null,
"gem_e2e_nlg": null,
"gem_web_nlg_en": null,
"gem_wiki_lingua_english_en": null,
"gigaword": null,
"glue_cola": null,
"glue_mnli": null,
"glue_mrpc": null,
"glue_qnli": null,
"glue_qqp": null,
"glue_sst2": null,
"glue_stsb": null,
"glue_wnli": null,
"hellaswag": null,
"huggingface_xsum": null,
"imdb_reviews_plain_text": null,
"lambada": null,
"math_dataset_algebra__linear_1d": null,
"multi_news": null,
"natural_questions_open": null,
"newsroom": null,
"openbookqa": null,
"opinion_abstracts_idebate": null,
"opinion_abstracts_rotten_tomatoes": null,
"para_crawl_enes": null,
"paws_wiki": null,
"piqa": null,
"quac": null,
"samsum": null,
"sentiment140": null,
"snli": null,
"squad_v1_1": null,
"squad_v2_0": null,
"story_cloze_2016": null,
"super_glue_cb": null,
"super_glue_copa": null,
"super_glue_multirc": null,
"super_glue_record": null,
"super_glue_rte": null,
"super_glue_wic": null,
"super_glue_wsc_fixed": null,
"trec": null,
"trivia_qa_rc": null,
"true_case": null,
"unified_qa_science_inst": null,
"winogrande": null,
"word_segment": null,
"wmt14_translate_fr-en": null,
"wmt16_translate_cs-en": null,
"wmt16_translate_de-en": null,
"wmt16_translate_fi-en": null,
"wmt16_translate_ro-en": null,
"wmt16_translate_ru-en": null,
"wmt16_translate_tr-en": null,
"yelp_polarity_reviews": null
}
```