---
license: cc-by-4.0
task_categories:
- text-generation
- text-classification
- token-classification
- question-answering
- zero-shot-classification
- translation
- summarization
language:
- en
size_categories:
- 10M<n<100M
---

**Number of examples per dataset (70 datasets)**

```json
{
  "ag_news_subset": 108497,
  "ai2_arc_ARC-Challenge": 829,
  "ai2_arc_ARC-Easy": 1927,
  "aeslc": 13187,
  "anli_r1": 15361,
  "anli_r2": 41133,
  "anli_r3": 91048,
  "bool_q": 8343,
  "cnn_dailymail": 259607,
  "coqa": 6456,
  "cosmos_qa": 22996,
  "definite_pronoun_resolution": 1079,
  "drop": 70045,
  "fix_punct": 25690,
  "gem_common_gen": 60936,
  "gem_dart": 56724,
  "gem_e2e_nlg": 30337,
  "gem_web_nlg_en": 31899,
  "gem_wiki_lingua_english_en": 89452,
  "gigaword": 1853123,
  "glue_cola": 7594,
  "glue_mnli": 711413,
  "glue_mrpc": 3117,
  "glue_qnli": 94453,
  "glue_qqp": 329860,
  "glue_sst2": 61011,
  "glue_stsb": 5085,
  "glue_wnli": 600,
  "hellaswag": 35941,
  "huggingface_xsum": 184162,
  "imdb_reviews_plain_text": null,
  "lambada": 4467,
  "math_dataset_algebra__linear_1d": null,
  "multi_news": 40646,
  "natural_questions_open": 79342,
  "newsroom": 900966,
  "openbookqa": 4471,
  "opinion_abstracts_idebate": 1554,
  "opinion_abstracts_rotten_tomatoes": 2908,
  "para_crawl_enes": 27430,
  "paws_wiki": 44831,
  "piqa": 14594,
  "quac": null,
  "samsum": null,
  "sentiment140": null,
  "snli": null,
  "squad_v1_1": null,
  "squad_v2_0": null,
  "story_cloze_2016": null,
  "super_glue_cb": 165,
  "super_glue_copa": 336,
  "super_glue_multirc": 24349,
  "super_glue_record": 90486,
  "super_glue_rte": 2064,
  "super_glue_wic": 4783,
  "super_glue_wsc_fixed": null,
  "trec": 4679,
  "trivia_qa_rc": null,
  "true_case": 26581,
  "unified_qa_science_inst": 560,
  "winogrande": 36218,
  "word_segment": 27256,
  "wmt14_translate_fr-en": 9070285,
  "wmt16_translate_cs-en": 9066896,
  "wmt16_translate_de-en": 4124373,
  "wmt16_translate_fi-en": 1880481,
  "wmt16_translate_ro-en": 553110,
  "wmt16_translate_ru-en": 2280872,
  "wmt16_translate_tr-en": 186016,
  "yelp_polarity_reviews": 507373
}
```

**Datasets grouped by task type**

```python
text_classification = [
    "ag_news_subset", "glue_cola", "glue_sst2", "imdb_reviews_plain_text",
    "yelp_polarity_reviews",
]

question_answering = [
    "ai2_arc_ARC-Challenge", "ai2_arc_ARC-Easy", "bool_q", "coqa", "cosmos_qa",
    "drop", "natural_questions_open", "openbookqa", "quac", "squad_v1_1",
    "squad_v2_0", "trivia_qa_rc",
]

text_generation = [
    "aeslc", "cnn_dailymail", "gem_common_gen", "gem_dart", "gem_e2e_nlg",
    "gem_web_nlg_en", "gem_wiki_lingua_english_en", "gigaword",
    "huggingface_xsum", "lambada", "multi_news", "newsroom", "samsum",
]

translation = [
    "wmt14_translate_fr-en", "wmt16_translate_cs-en", "wmt16_translate_de-en",
    "wmt16_translate_fi-en", "wmt16_translate_ro-en", "wmt16_translate_ru-en",
    "wmt16_translate_tr-en",
]

sentiment_analysis = ["sentiment140"]

textual_entailment = [
    "anli_r1", "anli_r2", "anli_r3", "glue_mnli", "glue_rte", "snli",
    "super_glue_cb", "super_glue_copa", "super_glue_rte",
]

paraphrase_detection = ["glue_mrpc", "glue_qqp", "paws_wiki"]

commonsense_reasoning = [
    "hellaswag", "piqa", "super_glue_multirc", "super_glue_record",
    "super_glue_wic", "super_glue_wsc_fixed", "winogrande",
]

textual_similarity = ["glue_stsb"]

named_entity_recognition = ["glue_wnli"]

text_correction = ["fix_punct", "true_case"]

text_segmentation = ["word_segment"]

argument_mining = ["opinion_abstracts_idebate", "opinion_abstracts_rotten_tomatoes"]

machine_reading_comprehension = ["glue_qnli"]

text_summarization = ["trec"]

language_modelling = ["story_cloze_2016"]

math_problem_solving = ["math_dataset_algebra__linear_1d", "unified_qa_science_inst"]
cross_lingual_information_retrieval = ["para_crawl_enes"]
```
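The category lists above can be used to slice the collection down to a single task family. Below is a minimal sketch of doing this with the `datasets` library; the repo id (`your-namespace/this-dataset`) and the `task_name` column are placeholders assumed for illustration and are not specified by this card, so adjust both to the actual repository and schema.

```python
# Minimal sketch: filter the collection to one task family.
# ASSUMPTIONS (not stated in this card): the data lives on the Hub under a
# placeholder repo id, and each example has a "task_name" column containing
# one of the dataset names listed above.
from datasets import load_dataset

question_answering = [
    "ai2_arc_ARC-Challenge", "ai2_arc_ARC-Easy", "bool_q", "coqa", "cosmos_qa",
    "drop", "natural_questions_open", "openbookqa", "quac", "squad_v1_1",
    "squad_v2_0", "trivia_qa_rc",
]
qa_tasks = set(question_answering)  # set membership is faster inside filter()

ds = load_dataset("your-namespace/this-dataset", split="train")  # placeholder repo id
qa_only = ds.filter(lambda example: example["task_name"] in qa_tasks)
print(len(qa_only))
```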