{"default": { "description": "Wikipedia dataset containing cleaned articles of all languages.\nThe datasets are built from the Wikipedia dump\n(https://dumps.wikimedia.org/) with one split per language. Each example\ncontains the content of one full Wikipedia article with cleaning to strip\nmarkup and unwanted sections (references, etc.).", "citation": "@ONLINE {wikidump,\n author = {Wikimedia Foundation},\n title = {Wikimedia Downloads},\n url = {https://dumps.wikimedia.org}\n}", "homepage": "https://dumps.wikimedia.org", "license": "", "features": { "tokens": { "feature": { "dtype": "string", "_type": "Value" }, "_type": "Sequence" }, "id": { "dtype": "int64", "_type": "Value" }, "text": { "dtype": "string", "_type": "Value" } }, "splits": { "validation": { "name": "validation", "num_bytes": 301864191, "num_examples": 21817, "dataset_name": null }, "train": { "name": "train", "num_bytes": 60558566627, "num_examples": 4368542, "dataset_name": null } }, "download_size": 20321590769, "dataset_size": 60860430818, "size_in_bytes": 81182021587 }}